1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "src/hydrogen.h"
6 #include "src/hydrogen-gvn.h"
// Zone-allocated hash map from an instruction's Hashcode() to the
// instruction itself; GVN uses it to find an earlier, equivalent
// instruction to replace a redundant one with.  Collisions are chained
// through indices into the side array lists_ (kNil terminates a chain).
// NOTE(review): this listing is fragmentary — lines are missing between
// the numbered lines below (e.g. access specifiers and closing braces).
12 class HInstructionMap FINAL : public ZoneObject {
14 HInstructionMap(Zone* zone, SideEffectsTracker* side_effects_tracker)
20 free_list_head_(kNil),
21 side_effects_tracker_(side_effects_tracker) {
// Both backing arrays start at kInitialSize and grow on demand.
22 ResizeLists(kInitialSize, zone);
23 Resize(kInitialSize, zone);
// Removes all instructions whose depends-on set intersects side_effects.
26 void Kill(SideEffects side_effects);
// Inserts instr and records its depends-on set so Kill() can first do a
// cheap whole-map intersection test.
28 void Add(HInstruction* instr, Zone* zone) {
29 present_depends_on_.Add(side_effects_tracker_->ComputeDependsOn(instr));
33 HInstruction* Lookup(HInstruction* instr) const;
// Deep copy; used when the dominator-tree traversal forks (see
// GvnBasicBlockState below).
35 HInstructionMap* Copy(Zone* zone) const {
36 return new(zone) HInstructionMap(zone, this);
39 bool IsEmpty() const { return count_ == 0; }
42 // A linked list of HInstruction* values. Stored in arrays.
43 struct HInstructionMapListElement {
45 int next;  // Index in the array of the next list element.
47 static const int kNil = -1;  // The end of a linked list
49 // Must be a power of 2.
50 static const int kInitialSize = 16;
52 HInstructionMap(Zone* zone, const HInstructionMap* other);
54 void Resize(int new_size, Zone* zone);
55 void ResizeLists(int new_size, Zone* zone);
56 void Insert(HInstruction* instr, Zone* zone);
// Masks a hash into [0, array_size_); relies on array_size_ being a
// power of two.
57 uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }
61 int count_;  // The number of values stored in the HInstructionMap.
// Union of ComputeDependsOn() for everything currently stored; lets
// Kill() bail out early when nothing in the map is affected.
62 SideEffects present_depends_on_;
63 HInstructionMapListElement* array_;
64 // Primary store - contains the first value
65 // with a given hash. Colliding elements are stored in linked lists.
66 HInstructionMapListElement* lists_;
67 // The linked lists containing hash collisions.
68 int free_list_head_;  // Unused elements in lists_ are on the free list.
69 SideEffectsTracker* side_effects_tracker_;
// Fixed-size map from each tracked side-effect kind (index < 
// kNumberOfTrackedSideEffects) to the most recent dominating instruction
// that produced that effect; used by AnalyzeGraph() for
// HandleSideEffectDominator().  (Fragmentary listing — lines missing.)
73 class HSideEffectMap FINAL BASE_EMBEDDED {
76 explicit HSideEffectMap(HSideEffectMap* other);
77 HSideEffectMap& operator= (const HSideEffectMap& other);
// Clears the entries for every effect contained in side_effects.
79 void Kill(SideEffects side_effects);
// Records instr as the dominating instruction for every effect it has.
81 void Store(SideEffects side_effects, HInstruction* instr);
83 bool IsEmpty() const { return count_ == 0; }
85 inline HInstruction* operator[](int i) const {
87 DCHECK(i < kNumberOfTrackedSideEffects);
90 inline HInstruction* at(int i) const { return operator[](i); }
// One slot per tracked side effect; NULL means "no dominator recorded".
94 HInstruction* data_[kNumberOfTrackedSideEffects];
// printf-style tracing helper; forwards the varargs to base::OS::VPrint.
// Callers guard with FLAG_trace_gvn via the TRACE_GVN_* macros below.
// (Fragmentary: the va_list declaration and va_end are not in view.)
98 void TraceGVN(const char* msg, ...) {
100 va_start(arguments, msg);
101 base::OS::VPrint(msg, arguments);
// One macro per arity (1..5) so call sites pay nothing when tracing is
// disabled; C-style variadic macros are avoided here.
106 // Wrap TraceGVN in macros to avoid the expense of evaluating its arguments when
107 // --trace-gvn is off.
108 #define TRACE_GVN_1(msg, a1) \
109 if (FLAG_trace_gvn) { \
113 #define TRACE_GVN_2(msg, a1, a2) \
114 if (FLAG_trace_gvn) { \
115 TraceGVN(msg, a1, a2); \
118 #define TRACE_GVN_3(msg, a1, a2, a3) \
119 if (FLAG_trace_gvn) { \
120 TraceGVN(msg, a1, a2, a3); \
123 #define TRACE_GVN_4(msg, a1, a2, a3, a4) \
124 if (FLAG_trace_gvn) { \
125 TraceGVN(msg, a1, a2, a3, a4); \
128 #define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
129 if (FLAG_trace_gvn) { \
130 TraceGVN(msg, a1, a2, a3, a4, a5); \
// Copy constructor used by Copy(): clones all scalar state and deep-copies
// both backing arrays into freshly zone-allocated storage, so the copy can
// be mutated independently of the original.
134 HInstructionMap::HInstructionMap(Zone* zone, const HInstructionMap* other)
135 : array_size_(other->array_size_),
136 lists_size_(other->lists_size_),
137 count_(other->count_),
138 present_depends_on_(other->present_depends_on_),
139 array_(zone->NewArray<HInstructionMapListElement>(other->array_size_)),
140 lists_(zone->NewArray<HInstructionMapListElement>(other->lists_size_)),
141 free_list_head_(other->free_list_head_),
142 side_effects_tracker_(other->side_effects_tracker_) {
// Elements are POD (pointer + int), so a raw memory copy is sufficient.
143 MemCopy(array_, other->array_,
144 array_size_ * sizeof(HInstructionMapListElement));
145 MemCopy(lists_, other->lists_,
146 lists_size_ * sizeof(HInstructionMapListElement));
// Removes every stored instruction whose depends-on set intersects
// |changes|, and rebuilds present_depends_on_ from the survivors.
// (Fragmentary listing — some loop/brace lines are missing.)
150 void HInstructionMap::Kill(SideEffects changes) {
// Fast path: nothing in the map depends on any of the changed effects.
151 if (!present_depends_on_.ContainsAnyOf(changes)) return;
152 present_depends_on_.RemoveAll();
153 for (int i = 0; i < array_size_; ++i) {
154 HInstruction* instr = array_[i].instr;
156 // Clear list of collisions first, so we know if it becomes empty.
157 int kept = kNil;  // List of kept elements.
// Walk the collision chain, splitting entries into the free list
// (dropped) or the "kept" list (retained).
159 for (int current = array_[i].next; current != kNil; current = next) {
160 next = lists_[current].next;
161 HInstruction* instr = lists_[current].instr;
162 SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
163 if (depends_on.ContainsAnyOf(changes)) {
// Drop: recycle this lists_ slot onto the free list.
166 lists_[current].next = free_list_head_;
167 free_list_head_ = current;
// Keep: re-link onto the kept chain and re-accumulate its effects.
170 lists_[current].next = kept;
172 present_depends_on_.Add(depends_on);
175 array_[i].next = kept;
177 // Now possibly drop directly indexed element.
178 instr = array_[i].instr;
179 SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
180 if (depends_on.ContainsAnyOf(changes)) {  // Drop it.
181 int head = array_[i].next;
// If a collision chain exists, promote its head into the primary slot
// and return that lists_ element to the free list.
184 array_[i].instr = NULL;
186 array_[i].instr = lists_[head].instr;
187 array_[i].next = lists_[head].next;
188 lists_[head].next = free_list_head_;
189 free_list_head_ = head;
192 present_depends_on_.Add(depends_on);  // Keep it.
// Returns a stored instruction that Equals() |instr|, checking first the
// primary slot for the bounded hash, then the collision chain.
// (The NULL-returning tail of the function is outside this fragment.)
199 HInstruction* HInstructionMap::Lookup(HInstruction* instr) const {
200 uint32_t hash = static_cast<uint32_t>(instr->Hashcode());
201 uint32_t pos = Bound(hash);
202 if (array_[pos].instr != NULL) {
203 if (array_[pos].instr->Equals(instr)) return array_[pos].instr;
204 int next = array_[pos].next;
205 while (next != kNil) {
206 if (lists_[next].instr->Equals(instr)) return lists_[next].instr;
207 next = lists_[next].next;
// Grows the primary array_ to new_size and rehashes every stored
// instruction (both primary entries and collision-chain entries) into it
// via Insert().  present_depends_on_ is deliberately left untouched.
214 void HInstructionMap::Resize(int new_size, Zone* zone) {
215 DCHECK(new_size > count_);
216 // Hashing the values into the new array has no more collisions than in the
217 // old hash map, so we can use the existing lists_ array, if we are careful.
219 // Make sure we have at least one free element.
220 if (free_list_head_ == kNil) {
221 ResizeLists(lists_size_ << 1, zone);
224 HInstructionMapListElement* new_array =
225 zone->NewArray<HInstructionMapListElement>(new_size);
226 memset(new_array, 0, sizeof(HInstructionMapListElement) * new_size);
228 HInstructionMapListElement* old_array = array_;
229 int old_size = array_size_;
// count_ is rebuilt by the Insert() calls below; old_count is kept only
// for the consistency DCHECK at the end.
231 int old_count = count_;
233 // Do not modify present_depends_on_. It is currently correct.
234 array_size_ = new_size;
237 if (old_array != NULL) {
238 // Iterate over all the elements in lists, rehashing them.
239 for (int i = 0; i < old_size; ++i) {
240 if (old_array[i].instr != NULL) {
241 int current = old_array[i].next;
242 while (current != kNil) {
// Re-insert the chained instruction, then recycle its lists_ slot.
243 Insert(lists_[current].instr, zone);
244 int next = lists_[current].next;
245 lists_[current].next = free_list_head_;
246 free_list_head_ = current;
249 // Rehash the directly stored instruction.
250 Insert(old_array[i].instr, zone);
255 DCHECK(count_ == old_count);
// Grows the collision-chain array lists_ to new_size, preserving existing
// entries and threading all newly added slots onto the free list.
259 void HInstructionMap::ResizeLists(int new_size, Zone* zone) {
260 DCHECK(new_size > lists_size_);
262 HInstructionMapListElement* new_lists =
263 zone->NewArray<HInstructionMapListElement>(new_size);
264 memset(new_lists, 0, sizeof(HInstructionMapListElement) * new_size);
266 HInstructionMapListElement* old_lists = lists_;
267 int old_size = lists_size_;
269 lists_size_ = new_size;
272 if (old_lists != NULL) {
// POD copy of the old chain entries; chain indices stay valid because
// existing slots keep their positions.
273 MemCopy(lists_, old_lists, old_size * sizeof(HInstructionMapListElement));
// Push every new slot onto the free list.
275 for (int i = old_size; i < lists_size_; ++i) {
276 lists_[i].next = free_list_head_;
// Inserts instr at its bounded hash position: directly into array_ when
// the slot is empty, otherwise prepended to the slot's collision chain
// using a recycled element from the free list.
282 void HInstructionMap::Insert(HInstruction* instr, Zone* zone) {
283 DCHECK(instr != NULL);
284 // Resizing when half of the hashtable is filled up.
285 if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
286 DCHECK(count_ < array_size_);
288 uint32_t pos = Bound(static_cast<uint32_t>(instr->Hashcode()));
289 if (array_[pos].instr == NULL) {
290 array_[pos].instr = instr;
291 array_[pos].next = kNil;
// Collision: take a slot from the free list (growing lists_ if empty)
// and link it in as the new head of the chain.
293 if (free_list_head_ == kNil) {
294 ResizeLists(lists_size_ << 1, zone);
296 int new_element_pos = free_list_head_;
297 DCHECK(new_element_pos != kNil);
298 free_list_head_ = lists_[free_list_head_].next;
299 lists_[new_element_pos].instr = instr;
300 lists_[new_element_pos].next = array_[pos].next;
301 DCHECK(array_[pos].next == kNil || lists_[array_[pos].next].instr != NULL);
302 array_[pos].next = new_element_pos;
// Default constructor: empty map, all dominator slots NULLed out.
307 HSideEffectMap::HSideEffectMap() : count_(0) {
308 memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
// Copy constructor; delegates the data_ copy to operator= below.
312 HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
313 *this = *other;  // Calls operator=.
// Assignment: raw copy of the pointer slots, guarded against self-assign.
// (The return statement is outside this fragment.)
317 HSideEffectMap& HSideEffectMap::operator=(const HSideEffectMap& other) {
318 if (this != &other) {
319 MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
// For every tracked effect contained in side_effects, drops the recorded
// dominating instruction (the NULLing of data_[i] is outside this
// fragment; the count_ decrement is visible below).
325 void HSideEffectMap::Kill(SideEffects side_effects) {
326 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
327 if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
328 if (data_[i] != NULL) count_--;
// For every tracked effect contained in side_effects, records instr as
// the new dominating instruction (the data_[i] = instr assignment is
// outside this fragment; count_ is bumped for newly filled slots).
335 void HSideEffectMap::Store(SideEffects side_effects, HInstruction* instr) {
336 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
337 if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
338 if (data_[i] == NULL) count_++;
// Computes the side effects instr produces, refining the coarse
// kGlobalVars / kInobjectFields flags into per-cell / per-field "special"
// effects when the store target can be identified.  When it cannot, the
// conservative fallback adds every special effect of that category.
345 SideEffects SideEffectsTracker::ComputeChanges(HInstruction* instr) {
347 SideEffects result(instr->ChangesFlags());
348 if (result.ContainsFlag(kGlobalVars)) {
349 if (instr->IsStoreNamedField()) {
350 HStoreNamedField* store = HStoreNamedField::cast(instr);
351 HConstant* target = HConstant::cast(store->object());
// Known PropertyCell target: narrow to a single GlobalVar index.
352 if (ComputeGlobalVar(Unique<PropertyCell>::cast(target->GetUnique()),
354 result.RemoveFlag(kGlobalVars);
355 result.AddSpecial(GlobalVar(index));
// Unknown target: conservatively touch all tracked global vars.
359 for (index = 0; index < kNumberOfGlobalVars; ++index) {
360 result.AddSpecial(GlobalVar(index));
362 } else if (result.ContainsFlag(kInobjectFields)) {
363 if (instr->IsStoreNamedField() &&
364 ComputeInobjectField(HStoreNamedField::cast(instr)->access(), &index)) {
365 result.RemoveFlag(kInobjectFields);
366 result.AddSpecial(InobjectField(index));
// Untracked field access: conservatively touch all tracked fields.
368 for (index = 0; index < kNumberOfInobjectFields; ++index) {
369 result.AddSpecial(InobjectField(index));
// Mirror of ComputeChanges() for the read side: refines instr's
// DependsOnFlags() for loads the same way ComputeChanges() refines
// ChangesFlags() for stores.
377 SideEffects SideEffectsTracker::ComputeDependsOn(HInstruction* instr) {
379 SideEffects result(instr->DependsOnFlags());
380 if (result.ContainsFlag(kGlobalVars)) {
381 if (instr->IsLoadNamedField()) {
382 HLoadNamedField* load = HLoadNamedField::cast(instr);
383 HConstant* target = HConstant::cast(load->object());
// Known PropertyCell source: narrow to a single GlobalVar index.
384 if (ComputeGlobalVar(Unique<PropertyCell>::cast(target->GetUnique()),
386 result.RemoveFlag(kGlobalVars);
387 result.AddSpecial(GlobalVar(index));
// Unknown source: conservatively depend on all tracked global vars.
391 for (index = 0; index < kNumberOfGlobalVars; ++index) {
392 result.AddSpecial(GlobalVar(index));
394 } else if (result.ContainsFlag(kInobjectFields)) {
395 if (instr->IsLoadNamedField() &&
396 ComputeInobjectField(HLoadNamedField::cast(instr)->access(), &index)) {
397 result.RemoveFlag(kInobjectFields);
398 result.AddSpecial(InobjectField(index));
// Untracked field access: conservatively depend on all tracked fields.
400 for (index = 0; index < kNumberOfInobjectFields; ++index) {
401 result.AddSpecial(InobjectField(index));
// Pretty-printer for a SideEffects set used by the tracing output:
// prints named flags first, then the tracked global-var and inobject-field
// specials, separated by a lazily-set separator string.
409 std::ostream& operator<<(std::ostream& os, const TrackedEffects& te) {
410 SideEffectsTracker* t = te.tracker;
411 const char* separator = "";
413 for (int bit = 0; bit < kNumberOfFlags; ++bit) {
414 GVNFlag flag = GVNFlagFromInt(bit);
415 if (te.effects.ContainsFlag(flag)) {
// Expands to one case per GVN flag name (body outside this fragment).
419 #define DECLARE_FLAG(Type) \
423 GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
424 GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
431 for (int index = 0; index < t->num_global_vars_; ++index) {
432 if (te.effects.ContainsSpecial(t->GlobalVar(index))) {
433 os << separator << "[" << *t->global_vars_[index].handle() << "]";
437 for (int index = 0; index < t->num_inobject_fields_; ++index) {
438 if (te.effects.ContainsSpecial(t->InobjectField(index))) {
439 os << separator << t->inobject_fields_[index];
// Maps a PropertyCell to a small tracked-global-var index.  Returns an
// existing index if the cell is already tracked, otherwise registers it
// if a slot is free (capacity kNumberOfGlobalVars); the false-returning
// tail is outside this fragment.
448 bool SideEffectsTracker::ComputeGlobalVar(Unique<PropertyCell> cell,
450 for (int i = 0; i < num_global_vars_; ++i) {
451 if (cell == global_vars_[i]) {
456 if (num_global_vars_ < kNumberOfGlobalVars) {
457 if (FLAG_trace_gvn) {
459 os << "Tracking global var [" << *cell.handle() << "] "
460 << "(mapped to index " << num_global_vars_ << ")" << std::endl;
462 *index = num_global_vars_;
463 global_vars_[num_global_vars_++] = cell;
// Maps an HObjectAccess to a small tracked-inobject-field index; same
// lookup-then-register scheme as ComputeGlobalVar() above, with capacity
// kNumberOfInobjectFields.
470 bool SideEffectsTracker::ComputeInobjectField(HObjectAccess access,
472 for (int i = 0; i < num_inobject_fields_; ++i) {
473 if (access.Equals(inobject_fields_[i])) {
478 if (num_inobject_fields_ < kNumberOfInobjectFields) {
479 if (FLAG_trace_gvn) {
481 os << "Tracking inobject field access " << access << " (mapped to index "
482 << num_inobject_fields_ << ")" << std::endl;
484 *index = num_inobject_fields_;
485 inobject_fields_[num_inobject_fields_++] = access;
// Phase constructor: sizes the per-block and per-loop side-effect lists
// and the visited bitset to the graph's block count, and pre-fills both
// lists with empty SideEffects.
492 HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph)
493 : HPhase("H_Global value numbering", graph),
494 removed_side_effects_(false),
495 block_side_effects_(graph->blocks()->length(), zone()),
496 loop_side_effects_(graph->blocks()->length(), zone()),
497 visited_on_paths_(graph->blocks()->length(), zone()) {
// GVN must not allocate handles; enforced up front.
498 DCHECK(!AllowHandleAllocation::IsAllowed());
499 block_side_effects_.AddBlock(
500 SideEffects(), graph->blocks()->length(), zone());
501 loop_side_effects_.AddBlock(
502 SideEffects(), graph->blocks()->length(), zone());
// Phase driver: iterates side-effect computation, optional LICM, and
// value numbering up to FLAG_gvn_iterations times, stopping early once an
// iteration removes no side effects.  State is cleared between rounds.
506 void HGlobalValueNumberingPhase::Run() {
507 DCHECK(!removed_side_effects_);
508 for (int i = FLAG_gvn_iterations; i > 0; --i) {
509 // Compute the side effects.
510 ComputeBlockSideEffects();
512 // Perform loop invariant code motion if requested.
513 if (FLAG_loop_invariant_code_motion) LoopInvariantCodeMotion();
515 // Perform the actual value numbering.
518 // Continue GVN if we removed any side effects.
519 if (!removed_side_effects_) break;
520 removed_side_effects_ = false;
522 // Clear all side effects.
523 DCHECK_EQ(block_side_effects_.length(), graph()->blocks()->length());
524 DCHECK_EQ(loop_side_effects_.length(), graph()->blocks()->length());
525 for (int i = 0; i < graph()->blocks()->length(); ++i) {
526 block_side_effects_[i].RemoveAll();
527 loop_side_effects_[i].RemoveAll();
529 visited_on_paths_.Clear();
// Computes, per basic block, the union of side effects of its
// instructions, and accumulates those into every enclosing loop's entry
// in loop_side_effects_.  Blocks are visited in reverse id order so inner
// loops are processed before their enclosing loops.
534 void HGlobalValueNumberingPhase::ComputeBlockSideEffects() {
535 for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
536 // Compute side effects for the block.
537 HBasicBlock* block = graph()->blocks()->at(i);
538 SideEffects side_effects;
// Unreachable or deoptimizing blocks contribute no effects.
539 if (block->IsReachable() && !block->IsDeoptimizing()) {
540 int id = block->block_id();
541 for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
542 HInstruction* instr = it.Current();
543 side_effects.Add(side_effects_tracker_.ComputeChanges(instr));
545 block_side_effects_[id].Add(side_effects);
547 // Loop headers are part of their loop.
548 if (block->IsLoopHeader()) {
549 loop_side_effects_[id].Add(side_effects);
552 // Propagate loop side effects upwards.
553 if (block->HasParentLoopHeader()) {
554 HBasicBlock* with_parent = block;
// A loop header propagates its whole loop's effects, not just its own.
555 if (block->IsLoopHeader()) side_effects = loop_side_effects_[id];
557 HBasicBlock* parent_block = with_parent->parent_loop_header();
558 loop_side_effects_[parent_block->block_id()].Add(side_effects);
559 with_parent = parent_block;
560 } while (with_parent->HasParentLoopHeader());
// For each loop header (scanned in reverse block order), runs
// ProcessLoopBlock() over every block of the loop body, from the header
// through its last back edge, using the loop's accumulated side effects
// as the kill set.
567 void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() {
568 TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
569 graph()->use_optimistic_licm() ? "yes" : "no");
570 for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
571 HBasicBlock* block = graph()->blocks()->at(i);
572 if (block->IsLoopHeader()) {
573 SideEffects side_effects = loop_side_effects_[block->block_id()];
574 if (FLAG_trace_gvn) {
576 os << "Try loop invariant motion for " << *block << " changes "
577 << Print(side_effects) << std::endl;
579 HBasicBlock* last = block->loop_information()->GetLastBackEdge();
// Loop bodies occupy the contiguous id range [header, last back edge].
580 for (int j = block->block_id(); j <= last->block_id(); ++j) {
581 ProcessLoopBlock(graph()->blocks()->at(j), block, side_effects);
// Attempts to hoist each GVN-able instruction of |block| out of the loop
// headed by |loop_header| into the loop's pre-header.  An instruction is
// hoistable when it does not depend on anything the loop changes
// (|loop_kills|) and all of its operands are defined before the
// pre-header's end.
588 void HGlobalValueNumberingPhase::ProcessLoopBlock(
590 HBasicBlock* loop_header,
591 SideEffects loop_kills) {
592 HBasicBlock* pre_header = loop_header->predecessors()->at(0);
593 if (FLAG_trace_gvn) {
595 os << "Loop invariant code motion for " << *block << " depends on "
596 << Print(loop_kills) << std::endl;
// Snapshot next before potentially unlinking instr from the block.
598 HInstruction* instr = block->first();
599 while (instr != NULL) {
600 HInstruction* next = instr->next();
601 if (instr->CheckFlag(HValue::kUseGVN)) {
602 SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
603 SideEffects depends_on = side_effects_tracker_.ComputeDependsOn(instr);
604 if (FLAG_trace_gvn) {
606 os << "Checking instruction i" << instr->id() << " ("
607 << instr->Mnemonic() << ") changes " << Print(changes)
608 << ", depends on " << Print(depends_on) << ". Loop changes "
609 << Print(loop_kills) << std::endl;
611 bool can_hoist = !depends_on.ContainsAnyOf(loop_kills);
// Without optimistic LICM, only hoist from blocks that dominate the
// loop exit (guaranteed to execute on every iteration).
612 if (can_hoist && !graph()->use_optimistic_licm()) {
613 can_hoist = block->IsLoopSuccessorDominator();
617 bool inputs_loop_invariant = true;
618 for (int i = 0; i < instr->OperandCount(); ++i) {
619 if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
620 inputs_loop_invariant = false;
624 if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
625 TRACE_GVN_2("Hoisting loop invariant instruction i%d to block B%d\n",
626 instr->id(), pre_header->block_id());
627 // Move the instruction out of the loop.
629 instr->InsertBefore(pre_header->end());
// Moving an effectful instruction may enable another GVN round.
630 if (instr->HasSideEffects()) removed_side_effects_ = true;
// Code motion is allowed for stubs, or while the function still has
// optimization attempts left before hitting FLAG_max_opt_count.
639 bool HGlobalValueNumberingPhase::AllowCodeMotion() {
640 return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count;
// Per-instruction code-motion gate: requires AllowCodeMotion() and that
// the instruction's block is reachable and not unconditionally
// deoptimizing.  NOTE(review): loop_header is unused in the visible body
// — presumably consumed by lines outside this fragment; confirm.
644 bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr,
645 HBasicBlock* loop_header) {
646 // If we've disabled code motion or we're in a block that unconditionally
647 // deoptimizes, don't move any instructions.
648 return AllowCodeMotion() && !instr->block()->IsDeoptimizing() &&
649 instr->block()->IsReachable();
// Recursively unions the side effects of every block lying on a path
// strictly between |dominator| and |dominated| (identified by the block-id
// range), using visited_on_paths_ to avoid revisiting blocks.  Loop
// headers additionally contribute their whole loop's effects.
654 HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock(
655 HBasicBlock* dominator, HBasicBlock* dominated) {
656 SideEffects side_effects;
657 for (int i = 0; i < dominated->predecessors()->length(); ++i) {
658 HBasicBlock* block = dominated->predecessors()->at(i);
659 if (dominator->block_id() < block->block_id() &&
660 block->block_id() < dominated->block_id() &&
661 !visited_on_paths_.Contains(block->block_id())) {
662 visited_on_paths_.Add(block->block_id());
663 side_effects.Add(block_side_effects_[block->block_id()]);
664 if (block->IsLoopHeader()) {
665 side_effects.Add(loop_side_effects_[block->block_id()]);
// Recurse through this predecessor's own predecessors.
667 side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
675 // Each instance of this class is like a "stack frame" for the recursive
676 // traversal of the dominator tree done during GVN (the stack is handled
677 // as a double linked list).
678 // We reuse frames when possible so the list length is limited by the depth
679 // of the dominator tree but this forces us to initialize each frame calling
680 // an explicit "Initialize" method instead of a using constructor.
681 class GvnBasicBlockState: public ZoneObject {
// Creates the root frame for the graph's entry block.
683 static GvnBasicBlockState* CreateEntry(Zone* zone,
684 HBasicBlock* entry_block,
685 HInstructionMap* entry_map) {
687 GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
690 HBasicBlock* block() { return block_; }
691 HInstructionMap* map() { return map_; }
692 HSideEffectMap* dominators() { return &dominators_; }
// Advances to the next frame in the depth-first dominator-tree walk,
// reporting the dominator of the returned frame through *dominator.
694 GvnBasicBlockState* next_in_dominator_tree_traversal(
696 HBasicBlock** dominator) {
697 // This assignment needs to happen before calling next_dominated() because
698 // that call can reuse "this" if we are at the last dominated block.
699 *dominator = block();
700 GvnBasicBlockState* result = next_dominated(zone);
701 if (result == NULL) {
702 GvnBasicBlockState* dominator_state = pop();
703 if (dominator_state != NULL) {
704 // This branch is guaranteed not to return NULL because pop() never
705 // returns a state where "is_done() == true".
706 *dominator = dominator_state->block();
707 result = dominator_state->next_dominated(zone);
709 // Unnecessary (we are returning NULL) but done for cleanness.
// (Re)initializes a frame; copy_map controls whether the instruction map
// is deep-copied or shared with the parent frame.
717 void Initialize(HBasicBlock* block,
718 HInstructionMap* map,
719 HSideEffectMap* dominators,
723 map_ = copy_map ? map->Copy(zone) : map;
724 dominated_index_ = -1;
725 length_ = block->dominated_blocks()->length();
726 if (dominators != NULL) {
727 dominators_ = *dominators;
730 bool is_done() { return dominated_index_ >= length_; }
732 GvnBasicBlockState(GvnBasicBlockState* previous,
734 HInstructionMap* map,
735 HSideEffectMap* dominators,
737 : previous_(previous), next_(NULL) {
738 Initialize(block, map, dominators, true, zone);
// Returns the frame for the next dominated block; reuses "this" (no copy
// of the map) for the last child, pushes a fresh frame otherwise.
741 GvnBasicBlockState* next_dominated(Zone* zone) {
743 if (dominated_index_ == length_ - 1) {
744 // No need to copy the map for the last child in the dominator tree.
745 Initialize(block_->dominated_blocks()->at(dominated_index_),
751 } else if (dominated_index_ < length_) {
752 return push(zone, block_->dominated_blocks()->at(dominated_index_));
// Appends (or reuses) the next_ frame for |block|, copying this frame's
// map and dominators into it.
758 GvnBasicBlockState* push(Zone* zone, HBasicBlock* block) {
761 new(zone) GvnBasicBlockState(this, block, map(), dominators(), zone);
763 next_->Initialize(block, map(), dominators(), true, zone);
// Walks back up the frame list, skipping frames that have no dominated
// blocks left to visit.
767 GvnBasicBlockState* pop() {
768 GvnBasicBlockState* result = previous_;
769 while (result != NULL && result->is_done()) {
770 TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
772 previous_->block()->block_id())
773 result = result->previous_;
778 GvnBasicBlockState* previous_;
779 GvnBasicBlockState* next_;
781 HInstructionMap* map_;
782 HSideEffectMap dominators_;
783 int dominated_index_;
788 // This is a recursive traversal of the dominator tree but it has been turned
789 // into a loop to avoid stack overflows.
790 // The logical "stack frames" of the recursion are kept in a list of
791 // GvnBasicBlockState instances.
792 void HGlobalValueNumberingPhase::AnalyzeGraph() {
793 HBasicBlock* entry_block = graph()->entry_block();
794 HInstructionMap* entry_map =
795 new(zone()) HInstructionMap(zone(), &side_effects_tracker_);
796 GvnBasicBlockState* current =
797 GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);
799 while (current != NULL) {
800 HBasicBlock* block = current->block();
801 HInstructionMap* map = current->map();
802 HSideEffectMap* dominators = current->dominators();
804 TRACE_GVN_2("Analyzing block B%d%s\n",
806 block->IsLoopHeader() ? " (loop header)" : "");
808 // If this is a loop header kill everything killed by the loop.
809 if (block->IsLoopHeader()) {
810 map->Kill(loop_side_effects_[block->block_id()]);
811 dominators->Kill(loop_side_effects_[block->block_id()]);
814 // Go through all instructions of the current block.
815 for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
816 HInstruction* instr = it.Current();
// First, let instructions that track side-effect dominators react to
// the recorded dominating effectful instruction, if any.
817 if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
818 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
819 HValue* other = dominators->at(i);
820 GVNFlag flag = GVNFlagFromInt(i);
821 if (instr->DependsOnFlags().Contains(flag) && other != NULL) {
822 TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
828 if (instr->HandleSideEffectDominator(flag, other)) {
829 removed_side_effects_ = true;
834 // Instruction was unlinked during graph traversal.
835 if (!instr->IsLinked()) continue;
// Effectful instruction: invalidate affected map entries and record it
// as the new dominator for its tracked effects.
837 SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
838 if (!changes.IsEmpty()) {
839 // Clear all instructions in the map that are affected by side effects.
840 // Store instruction as the dominating one for tracked side effects.
842 dominators->Store(changes, instr);
843 if (FLAG_trace_gvn) {
845 os << "Instruction i" << instr->id() << " changes " << Print(changes)
// The core value-numbering step: replace instr with an equivalent
// earlier instruction from the map, or add it for later reuse.
849 if (instr->CheckFlag(HValue::kUseGVN) &&
850 !instr->CheckFlag(HValue::kCantBeReplaced)) {
851 DCHECK(!instr->HasObservableSideEffects());
852 HInstruction* other = map->Lookup(instr);
854 DCHECK(instr->Equals(other) && other->Equals(instr));
855 TRACE_GVN_4("Replacing instruction i%d (%s) with i%d (%s)\n",
860 if (instr->HasSideEffects()) removed_side_effects_ = true;
861 instr->DeleteAndReplaceWith(other);
863 map->Add(instr, zone());
// Advance the explicit "call stack" to the next dominated block.
868 HBasicBlock* dominator_block;
869 GvnBasicBlockState* next =
870 current->next_in_dominator_tree_traversal(zone(),
874 HBasicBlock* dominated = next->block();
875 HInstructionMap* successor_map = next->map();
876 HSideEffectMap* successor_dominators = next->dominators();
878 // Kill everything killed on any path between this block and the
879 // dominated block. We don't have to traverse these paths if the
880 // value map and the dominators list is already empty. If the range
881 // of block ids (block_id, dominated_id) is empty there are no such
883 if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
884 dominator_block->block_id() + 1 < dominated->block_id()) {
885 visited_on_paths_.Clear();
886 SideEffects side_effects_on_all_paths =
887 CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
889 successor_map->Kill(side_effects_on_all_paths);
890 successor_dominators->Kill(side_effects_on_all_paths);
897 } } // namespace v8::internal