1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 #include "hydrogen-gvn.h"
// Hash map from an instruction's Hashcode() to equivalent instructions,
// used by GVN to find a previously-seen instruction that Equals() a new
// one.  Open hashing: array_ is the primary store (one entry per hash
// bucket), collisions are chained through index-linked elements in
// lists_, and unused lists_ slots form a free list.
// NOTE(review): this listing is missing source lines (access specifiers,
// some members such as array_size_/lists_size_, and closing braces).
35 class HInstructionMap V8_FINAL : public ZoneObject {
37 HInstructionMap(Zone* zone, SideEffectsTracker* side_effects_tracker)
43 free_list_head_(kNil),
44 side_effects_tracker_(side_effects_tracker) {
// Pre-size both the bucket array and the collision-list storage.
45 ResizeLists(kInitialSize, zone);
46 Resize(kInitialSize, zone);
// Removes all entries whose DependsOn set intersects side_effects.
49 void Kill(SideEffects side_effects);
// Inserts instr and records its DependsOn flags so Kill() can cheaply
// skip work when nothing in the map depends on the changed effects.
51 void Add(HInstruction* instr, Zone* zone) {
52 present_depends_on_.Add(side_effects_tracker_->ComputeDependsOn(instr));
56 HInstruction* Lookup(HInstruction* instr) const;
// Deep copy into zone; used when the dominator-tree traversal forks.
58 HInstructionMap* Copy(Zone* zone) const {
59 return new(zone) HInstructionMap(zone, this);
62 bool IsEmpty() const { return count_ == 0; }
65 // A linked list of HInstruction* values. Stored in arrays.
66 struct HInstructionMapListElement {
68 int next;  // Index in the array of the next list element.
70 static const int kNil = -1;  // The end of a linked list
72 // Must be a power of 2.
73 static const int kInitialSize = 16;
75 HInstructionMap(Zone* zone, const HInstructionMap* other);
77 void Resize(int new_size, Zone* zone);
78 void ResizeLists(int new_size, Zone* zone);
79 void Insert(HInstruction* instr, Zone* zone);
// Maps a hash onto a bucket index; relies on array_size_ being a
// power of 2 so the mask is equivalent to modulo.
80 uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }
84 int count_;  // The number of values stored in the HInstructionMap.
// Union of ComputeDependsOn() for every instruction currently stored;
// a conservative superset used as a fast filter in Kill().
85 SideEffects present_depends_on_;
86 HInstructionMapListElement* array_;
87 // Primary store - contains the first value
88 // with a given hash. Colliding elements are stored in linked lists.
89 HInstructionMapListElement* lists_;
90 // The linked lists containing hash collisions.
91 int free_list_head_;  // Unused elements in lists_ are on the free list.
92 SideEffectsTracker* side_effects_tracker_;
// Maps each tracked side-effect kind (0..kNumberOfTrackedSideEffects-1)
// to the most recent dominating instruction that produced it, or NULL.
// Backed by a fixed-size array; count_ tracks non-NULL entries so
// IsEmpty() is O(1).
// NOTE(review): this listing is missing source lines (default ctor
// declaration, some members, and closing braces).
96 class HSideEffectMap V8_FINAL BASE_EMBEDDED {
99 explicit HSideEffectMap(HSideEffectMap* other);
100 HSideEffectMap& operator= (const HSideEffectMap& other);
// Clears the entries for every side effect contained in side_effects.
102 void Kill(SideEffects side_effects);
// Records instr as the dominator for every side effect it produces.
104 void Store(SideEffects side_effects, HInstruction* instr);
106 bool IsEmpty() const { return count_ == 0; }
108 inline HInstruction* operator[](int i) const {
110 ASSERT(i < kNumberOfTrackedSideEffects);
113 inline HInstruction* at(int i) const { return operator[](i); }
117 HInstruction* data_[kNumberOfTrackedSideEffects];
// printf-style helper for --trace-gvn diagnostics; forwards the varargs
// to OS::VPrint.
// NOTE(review): the va_list declaration, va_end and closing brace are on
// lines missing from this listing.
121 void TraceGVN(const char* msg, ...) {
123 va_start(arguments, msg);
124 OS::VPrint(msg, arguments);
129 // Wrap TraceGVN in macros to avoid the expense of evaluating its arguments when
130 // --trace-gvn is off.
// One macro per arity (1..5) because this predates variadic-macro use
// here; each checks FLAG_trace_gvn before forwarding to TraceGVN.
// NOTE(review): some continuation lines of these macro bodies are
// missing from this listing.
131 #define TRACE_GVN_1(msg, a1) \
132 if (FLAG_trace_gvn) { \
136 #define TRACE_GVN_2(msg, a1, a2) \
137 if (FLAG_trace_gvn) { \
138 TraceGVN(msg, a1, a2); \
141 #define TRACE_GVN_3(msg, a1, a2, a3) \
142 if (FLAG_trace_gvn) { \
143 TraceGVN(msg, a1, a2, a3); \
146 #define TRACE_GVN_4(msg, a1, a2, a3, a4) \
147 if (FLAG_trace_gvn) { \
148 TraceGVN(msg, a1, a2, a3, a4); \
151 #define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
152 if (FLAG_trace_gvn) { \
153 TraceGVN(msg, a1, a2, a3, a4, a5); \
// Copy constructor (zone-allocating): clones another map by allocating
// fresh array_/lists_ storage in zone and memcpy'ing the contents.
// Safe because the stored elements are plain {HInstruction*, int} pairs.
157 HInstructionMap::HInstructionMap(Zone* zone, const HInstructionMap* other)
158 : array_size_(other->array_size_),
159 lists_size_(other->lists_size_),
160 count_(other->count_),
161 present_depends_on_(other->present_depends_on_),
162 array_(zone->NewArray<HInstructionMapListElement>(other->array_size_)),
163 lists_(zone->NewArray<HInstructionMapListElement>(other->lists_size_)),
164 free_list_head_(other->free_list_head_),
165 side_effects_tracker_(other->side_effects_tracker_) {
// Presumably OS::MemCopy calls — the call lines themselves are missing
// from this listing; only the argument lines remain.
167 array_, other->array_, array_size_ * sizeof(HInstructionMapListElement));
169 lists_, other->lists_, lists_size_ * sizeof(HInstructionMapListElement));
// Removes every instruction whose DependsOn set intersects `changes`,
// then rebuilds present_depends_on_ from the survivors.  Collision
// chains are filtered first, then the directly-indexed bucket entry,
// which may be replaced by the head of its (already filtered) chain.
// NOTE(review): several lines (count_ updates, else branches, closing
// braces) are missing from this listing.
173 void HInstructionMap::Kill(SideEffects changes) {
// Fast path: nothing stored depends on any of the changed effects.
174 if (!present_depends_on_.ContainsAnyOf(changes)) return;
// Recomputed below by re-adding the DependsOn set of each kept entry.
175 present_depends_on_.RemoveAll();
176 for (int i = 0; i < array_size_; ++i) {
177 HInstruction* instr = array_[i].instr;
179 // Clear list of collisions first, so we know if it becomes empty.
180 int kept = kNil;  // List of kept elements.
182 for (int current = array_[i].next; current != kNil; current = next) {
183 next = lists_[current].next;
184 HInstruction* instr = lists_[current].instr;
185 SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
186 if (depends_on.ContainsAnyOf(changes)) {
// Dropped: return the list element to the free list.
189 lists_[current].next = free_list_head_;
190 free_list_head_ = current;
// Kept: prepend to the rebuilt "kept" chain.
193 lists_[current].next = kept;
195 present_depends_on_.Add(depends_on);
198 array_[i].next = kept;
200 // Now possibly drop directly indexed element.
201 instr = array_[i].instr;
202 SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
203 if (depends_on.ContainsAnyOf(changes)) {  // Drop it.
205 int head = array_[i].next;
// Empty chain: just clear the bucket...
207 array_[i].instr = NULL;
// ...otherwise promote the chain head into the bucket slot and free it.
209 array_[i].instr = lists_[head].instr;
210 array_[i].next = lists_[head].next;
211 lists_[head].next = free_list_head_;
212 free_list_head_ = head;
215 present_depends_on_.Add(depends_on);  // Keep it.
// Looks up an instruction equivalent to instr (same Hashcode bucket,
// Equals() true): checks the bucket entry first, then walks the
// collision chain.  The not-found return is on a line missing from
// this listing (presumably NULL).
222 HInstruction* HInstructionMap::Lookup(HInstruction* instr) const {
223 uint32_t hash = static_cast<uint32_t>(instr->Hashcode());
224 uint32_t pos = Bound(hash);
225 if (array_[pos].instr != NULL) {
226 if (array_[pos].instr->Equals(instr)) return array_[pos].instr;
227 int next = array_[pos].next;
228 while (next != kNil) {
229 if (lists_[next].instr->Equals(instr)) return lists_[next].instr;
230 next = lists_[next].next;
// Grows the bucket array to new_size and rehashes every stored
// instruction (both bucket entries and chained collisions) into it via
// Insert().  Chain elements are returned to the free list as they are
// rehashed, which is why at least one free element must exist up front.
// NOTE(review): some lines (count_ reset, chain-walk advance, closing
// braces) are missing from this listing.
237 void HInstructionMap::Resize(int new_size, Zone* zone) {
238 ASSERT(new_size > count_);
239 // Hashing the values into the new array has no more collisions than in the
240 // old hash map, so we can use the existing lists_ array, if we are careful.
242 // Make sure we have at least one free element.
243 if (free_list_head_ == kNil) {
244 ResizeLists(lists_size_ << 1, zone);
247 HInstructionMapListElement* new_array =
248 zone->NewArray<HInstructionMapListElement>(new_size);
// Zeroed storage means every bucket starts with instr == NULL.
249 memset(new_array, 0, sizeof(HInstructionMapListElement) * new_size);
251 HInstructionMapListElement* old_array = array_;
252 int old_size = array_size_;
// Saved so the final ASSERT can check rehashing preserved the count.
254 int old_count = count_;
256 // Do not modify present_depends_on_. It is currently correct.
257 array_size_ = new_size;
260 if (old_array != NULL) {
261 // Iterate over all the elements in lists, rehashing them.
262 for (int i = 0; i < old_size; ++i) {
263 if (old_array[i].instr != NULL) {
264 int current = old_array[i].next;
265 while (current != kNil) {
266 Insert(lists_[current].instr, zone);
267 int next = lists_[current].next;
// Recycle the old chain element.
268 lists_[current].next = free_list_head_;
269 free_list_head_ = current;
272 // Rehash the directly stored instruction.
273 Insert(old_array[i].instr, zone);
278 ASSERT(count_ == old_count);
// Grows the collision-list storage to new_size, copies the old elements
// over, and threads the newly added slots onto the free list.
// NOTE(review): the lists_ reassignment, the free_list_head_ update in
// the loop, and closing braces are on lines missing from this listing.
282 void HInstructionMap::ResizeLists(int new_size, Zone* zone) {
283 ASSERT(new_size > lists_size_);
285 HInstructionMapListElement* new_lists =
286 zone->NewArray<HInstructionMapListElement>(new_size);
287 memset(new_lists, 0, sizeof(HInstructionMapListElement) * new_size);
289 HInstructionMapListElement* old_lists = lists_;
290 int old_size = lists_size_;
292 lists_size_ = new_size;
295 if (old_lists != NULL) {
297 lists_, old_lists, old_size * sizeof(HInstructionMapListElement));
// Chain each brand-new slot onto the free list.
299 for (int i = old_size; i < lists_size_; ++i) {
300 lists_[i].next = free_list_head_;
// Inserts instr into the hash table (no duplicate check — callers use
// Lookup() first).  An empty bucket takes the instruction directly;
// otherwise a free collision-list element is prepended to the bucket's
// chain, growing lists_ first if the free list is empty.
// NOTE(review): the count_ increment, the else keyword and closing
// braces are on lines missing from this listing.
306 void HInstructionMap::Insert(HInstruction* instr, Zone* zone) {
307 ASSERT(instr != NULL);
308 // Resizing when half of the hashtable is filled up.
309 if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
310 ASSERT(count_ < array_size_);
312 uint32_t pos = Bound(static_cast<uint32_t>(instr->Hashcode()));
313 if (array_[pos].instr == NULL) {
// Empty bucket: store directly, no chain.
314 array_[pos].instr = instr;
315 array_[pos].next = kNil;
// Collision path: ensure a free list element exists.
317 if (free_list_head_ == kNil) {
318 ResizeLists(lists_size_ << 1, zone);
320 int new_element_pos = free_list_head_;
321 ASSERT(new_element_pos != kNil);
322 free_list_head_ = lists_[free_list_head_].next;
// Prepend the new element to the bucket's collision chain.
323 lists_[new_element_pos].instr = instr;
324 lists_[new_element_pos].next = array_[pos].next;
325 ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].instr != NULL);
326 array_[pos].next = new_element_pos;
// Default constructor: empty map, all per-effect dominator slots NULL.
// (kPointerSize per slot since data_ holds HInstruction* pointers.)
331 HSideEffectMap::HSideEffectMap() : count_(0) {
332 memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
// Copy constructor: delegates the data_ copy to operator= so the copy
// logic lives in one place.
336 HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
337 *this = *other;  // Calls operator=.
// Assignment: raw memcpy of the pointer array; guards self-assignment.
// The `return *this;` line is missing from this listing.
341 HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) {
342 if (this != &other) {
343 OS::MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
// Clears the dominator entry for each tracked effect in side_effects.
// The data_[i] = NULL line is missing from this listing; count_ is
// decremented only for slots that were actually occupied.
349 void HSideEffectMap::Kill(SideEffects side_effects) {
350 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
351 if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
352 if (data_[i] != NULL) count_--;
// Records instr as the dominating instruction for each tracked effect
// in side_effects.  The data_[i] = instr line is missing from this
// listing; count_ is incremented only when a slot goes NULL -> non-NULL.
359 void HSideEffectMap::Store(SideEffects side_effects, HInstruction* instr) {
360 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
361 if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
362 if (data_[i] == NULL) count_++;
// Computes the set of side effects instr produces, refining the coarse
// kGlobalVars / kInobjectFields flags into per-cell / per-field
// "special" effects when the exact target is known and trackable.  If
// not trackable, all special bits of that category are set instead
// (conservative).  NOTE(review): else lines, the `int index;`
// declaration, the return and closing braces are missing from this
// listing.
369 SideEffects SideEffectsTracker::ComputeChanges(HInstruction* instr) {
371 SideEffects result(instr->ChangesFlags());
372 if (result.ContainsFlag(kGlobalVars)) {
// A store to a known, tracked global cell only changes that one var.
373 if (instr->IsStoreGlobalCell() &&
374 ComputeGlobalVar(HStoreGlobalCell::cast(instr)->cell(), &index)) {
375 result.RemoveFlag(kGlobalVars);
376 result.AddSpecial(GlobalVar(index));
// Unknown target: conservatively mark every tracked global var.
378 for (index = 0; index < kNumberOfGlobalVars; ++index) {
379 result.AddSpecial(GlobalVar(index));
383 if (result.ContainsFlag(kInobjectFields)) {
// Same refinement for stores to tracked in-object fields.
384 if (instr->IsStoreNamedField() &&
385 ComputeInobjectField(HStoreNamedField::cast(instr)->access(), &index)) {
386 result.RemoveFlag(kInobjectFields);
387 result.AddSpecial(InobjectField(index));
389 for (index = 0; index < kNumberOfInobjectFields; ++index) {
390 result.AddSpecial(InobjectField(index));
// Mirror of ComputeChanges for the DependsOn direction: refines
// kGlobalVars / kInobjectFields into per-cell / per-field special
// dependencies for loads with a known, tracked target; otherwise sets
// all special bits of the category (conservative).  NOTE(review): else
// lines, the `int index;` declaration, the return and closing braces
// are missing from this listing.
398 SideEffects SideEffectsTracker::ComputeDependsOn(HInstruction* instr) {
400 SideEffects result(instr->DependsOnFlags());
401 if (result.ContainsFlag(kGlobalVars)) {
402 if (instr->IsLoadGlobalCell() &&
403 ComputeGlobalVar(HLoadGlobalCell::cast(instr)->cell(), &index)) {
404 result.RemoveFlag(kGlobalVars);
405 result.AddSpecial(GlobalVar(index));
407 for (index = 0; index < kNumberOfGlobalVars; ++index) {
408 result.AddSpecial(GlobalVar(index));
412 if (result.ContainsFlag(kInobjectFields)) {
413 if (instr->IsLoadNamedField() &&
414 ComputeInobjectField(HLoadNamedField::cast(instr)->access(), &index)) {
415 result.RemoveFlag(kInobjectFields);
416 result.AddSpecial(InobjectField(index));
418 for (index = 0; index < kNumberOfInobjectFields; ++index) {
419 result.AddSpecial(InobjectField(index));
// Pretty-prints a SideEffects set for --trace-gvn: first the named
// GVN flags (via the X-macro flag lists), then the tracked global-var
// cells and in-object fields as special entries.  NOTE(review): the
// surrounding braces, separator updates and switch scaffolding are on
// lines missing from this listing.
427 void SideEffectsTracker::PrintSideEffectsTo(StringStream* stream,
428 SideEffects side_effects) const {
// separator starts empty so the first entry is not preceded by a comma.
429 const char* separator = "";
431 for (int bit = 0; bit < kNumberOfFlags; ++bit) {
432 GVNFlag flag = GVNFlagFromInt(bit);
433 if (side_effects.ContainsFlag(flag)) {
434 stream->Add(separator);
// X-macro expansion: one case per flag, printing the flag's name.
437 #define DECLARE_FLAG(Type) \
439 stream->Add(#Type); \
441 GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
442 GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
// Tracked global-variable cells, printed by cell address.
449 for (int index = 0; index < num_global_vars_; ++index) {
450 if (side_effects.ContainsSpecial(GlobalVar(index))) {
451 stream->Add(separator);
453 stream->Add("[%p]", *global_vars_[index].handle());
// Tracked in-object field accesses, printed via HObjectAccess.
456 for (int index = 0; index < num_inobject_fields_; ++index) {
457 if (side_effects.ContainsSpecial(InobjectField(index))) {
458 stream->Add(separator);
460 inobject_fields_[index].PrintTo(stream);
// Maps a global cell to a small tracking index, registering it on first
// sight up to kNumberOfGlobalVars.  On success writes the index through
// *index and (presumably) returns true; returns false when the table is
// full — the return statements are on lines missing from this listing.
467 bool SideEffectsTracker::ComputeGlobalVar(Unique<Cell> cell, int* index) {
// Already tracked?  Reuse its index.
468 for (int i = 0; i < num_global_vars_; ++i) {
469 if (cell == global_vars_[i]) {
// Room left: register the cell under a fresh index.
474 if (num_global_vars_ < kNumberOfGlobalVars) {
475 if (FLAG_trace_gvn) {
476 HeapStringAllocator allocator;
477 StringStream stream(&allocator);
478 stream.Add("Tracking global var [%p] (mapped to index %d)\n",
479 *cell.handle(), num_global_vars_);
480 stream.OutputToStdOut();
482 *index = num_global_vars_;
483 global_vars_[num_global_vars_++] = cell;
// Counterpart of ComputeGlobalVar for in-object field accesses: maps an
// HObjectAccess to a tracking index, registering new ones up to
// kNumberOfInobjectFields.  The `int* index` parameter line and the
// return statements are missing from this listing.
490 bool SideEffectsTracker::ComputeInobjectField(HObjectAccess access,
492 for (int i = 0; i < num_inobject_fields_; ++i) {
493 if (access.Equals(inobject_fields_[i])) {
498 if (num_inobject_fields_ < kNumberOfInobjectFields) {
499 if (FLAG_trace_gvn) {
500 HeapStringAllocator allocator;
501 StringStream stream(&allocator);
502 stream.Add("Tracking inobject field access ");
503 access.PrintTo(&stream);
504 stream.Add(" (mapped to index %d)\n", num_inobject_fields_);
505 stream.OutputToStdOut();
507 *index = num_inobject_fields_;
508 inobject_fields_[num_inobject_fields_++] = access;
// Phase constructor: sizes the per-block side-effect lists and the
// visited_on_paths_ bit vector to the graph's block count, and fills
// both side-effect lists with empty SideEffects entries.
515 HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph)
516 : HPhase("H_Global value numbering", graph),
517 removed_side_effects_(false),
518 block_side_effects_(graph->blocks()->length(), zone()),
519 loop_side_effects_(graph->blocks()->length(), zone()),
520 visited_on_paths_(graph->blocks()->length(), zone()) {
// GVN must not allocate handles; enforced up front.
521 ASSERT(!AllowHandleAllocation::IsAllowed());
522 block_side_effects_.AddBlock(
523 SideEffects(), graph->blocks()->length(), zone());
524 loop_side_effects_.AddBlock(
525 SideEffects(), graph->blocks()->length(), zone());
// Drives GVN: up to FLAG_gvn_iterations rounds of side-effect
// computation, optional loop-invariant code motion, and value
// numbering.  Iterates only while a round removed side effects, since
// removals can expose new redundancies; state is reset between rounds.
// NOTE(review): the AnalyzeGraph() call and closing braces are on lines
// missing from this listing.
529 void HGlobalValueNumberingPhase::Run() {
530 ASSERT(!removed_side_effects_);
531 for (int i = FLAG_gvn_iterations; i > 0; --i) {
532 // Compute the side effects.
533 ComputeBlockSideEffects();
535 // Perform loop invariant code motion if requested.
536 if (FLAG_loop_invariant_code_motion) LoopInvariantCodeMotion();
538 // Perform the actual value numbering.
541 // Continue GVN if we removed any side effects.
542 if (!removed_side_effects_) break;
543 removed_side_effects_ = false;
545 // Clear all side effects.
546 ASSERT_EQ(block_side_effects_.length(), graph()->blocks()->length());
547 ASSERT_EQ(loop_side_effects_.length(), graph()->blocks()->length());
548 for (int i = 0; i < graph()->blocks()->length(); ++i) {
549 block_side_effects_[i].RemoveAll();
550 loop_side_effects_[i].RemoveAll();
552 visited_on_paths_.Clear();
// Fills block_side_effects_ and loop_side_effects_.  Walks blocks in
// reverse so inner-loop effects are known before they are propagated to
// enclosing loop headers.  Unreachable or deoptimizing blocks
// contribute nothing.  NOTE(review): the `do {` opening of the
// propagation loop and closing braces are on lines missing from this
// listing.
557 void HGlobalValueNumberingPhase::ComputeBlockSideEffects() {
558 for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
559 // Compute side effects for the block.
560 HBasicBlock* block = graph()->blocks()->at(i);
561 SideEffects side_effects;
562 if (block->IsReachable() && !block->IsDeoptimizing()) {
563 int id = block->block_id();
// Union of Changes flags over every instruction in the block.
564 for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
565 HInstruction* instr = it.Current();
566 side_effects.Add(side_effects_tracker_.ComputeChanges(instr));
568 block_side_effects_[id].Add(side_effects);
570 // Loop headers are part of their loop.
571 if (block->IsLoopHeader()) {
572 loop_side_effects_[id].Add(side_effects);
575 // Propagate loop side effects upwards.
576 if (block->HasParentLoopHeader()) {
577 HBasicBlock* with_parent = block;
// A loop header contributes its whole loop's effects, not just its own.
578 if (block->IsLoopHeader()) side_effects = loop_side_effects_[id];
580 HBasicBlock* parent_block = with_parent->parent_loop_header();
581 loop_side_effects_[parent_block->block_id()].Add(side_effects);
582 with_parent = parent_block;
583 } while (with_parent->HasParentLoopHeader());
// Attempts loop-invariant code motion: for every loop header (scanned
// in reverse block order), processes each block in the loop's body
// range [header, last back edge] against the loop's accumulated side
// effects.  NOTE(review): closing braces are on lines missing from this
// listing.
590 void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() {
591 TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
592 graph()->use_optimistic_licm() ? "yes" : "no");
593 for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
594 HBasicBlock* block = graph()->blocks()->at(i);
595 if (block->IsLoopHeader()) {
596 SideEffects side_effects = loop_side_effects_[block->block_id()];
597 if (FLAG_trace_gvn) {
598 HeapStringAllocator allocator;
599 StringStream stream(&allocator);
600 stream.Add("Try loop invariant motion for block B%d changes ",
602 side_effects_tracker_.PrintSideEffectsTo(&stream, side_effects);
604 stream.OutputToStdOut();
// Blocks of the loop body occupy the contiguous id range
// [header id, last back edge id].
606 HBasicBlock* last = block->loop_information()->GetLastBackEdge();
607 for (int j = block->block_id(); j <= last->block_id(); ++j) {
608 ProcessLoopBlock(graph()->blocks()->at(j), block, side_effects);
// Examines each GVN-eligible instruction in one loop-body block and
// hoists it to the loop pre-header when (a) nothing the loop changes is
// in the instruction's DependsOn set, (b) under non-optimistic LICM the
// block dominates the loop exit, and (c) all operands are defined
// before the pre-header.  NOTE(review): the `HBasicBlock* block`
// parameter line, Unlink call, some braces and the advance to `next`
// are on lines missing from this listing.
615 void HGlobalValueNumberingPhase::ProcessLoopBlock(
617 HBasicBlock* loop_header,
618 SideEffects loop_kills) {
// Hoist target: the loop header's first predecessor is the pre-header.
619 HBasicBlock* pre_header = loop_header->predecessors()->at(0);
620 if (FLAG_trace_gvn) {
621 HeapStringAllocator allocator;
622 StringStream stream(&allocator);
623 stream.Add("Loop invariant code motion for B%d depends on ",
625 side_effects_tracker_.PrintSideEffectsTo(&stream, loop_kills);
627 stream.OutputToStdOut();
// `next` is captured up front because hoisting unlinks `instr`.
629 HInstruction* instr = block->first();
630 while (instr != NULL) {
631 HInstruction* next = instr->next();
632 if (instr->CheckFlag(HValue::kUseGVN)) {
633 SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
634 SideEffects depends_on = side_effects_tracker_.ComputeDependsOn(instr);
635 if (FLAG_trace_gvn) {
636 HeapStringAllocator allocator;
637 StringStream stream(&allocator);
638 stream.Add("Checking instruction i%d (%s) changes ",
639 instr->id(), instr->Mnemonic());
640 side_effects_tracker_.PrintSideEffectsTo(&stream, changes);
641 stream.Add(", depends on ");
642 side_effects_tracker_.PrintSideEffectsTo(&stream, depends_on);
643 stream.Add(". Loop changes ");
644 side_effects_tracker_.PrintSideEffectsTo(&stream, loop_kills);
646 stream.OutputToStdOut();
// Hoistable only if the loop kills nothing the instruction reads.
648 bool can_hoist = !depends_on.ContainsAnyOf(loop_kills);
649 if (can_hoist && !graph()->use_optimistic_licm()) {
650 can_hoist = block->IsLoopSuccessorDominator();
// All operands must already be available at the pre-header.
654 bool inputs_loop_invariant = true;
655 for (int i = 0; i < instr->OperandCount(); ++i) {
656 if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
657 inputs_loop_invariant = false;
661 if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
662 TRACE_GVN_2("Hoisting loop invariant instruction i%d to block B%d\n",
663 instr->id(), pre_header->block_id());
664 // Move the instruction out of the loop.
666 instr->InsertBefore(pre_header->end());
// Hoisting a side-effecting instruction may enable another GVN round.
667 if (instr->HasSideEffects()) removed_side_effects_ = true;
// Code motion is allowed for stubs unconditionally; for functions only
// while the optimization count stays below FLAG_max_opt_count (avoids
// hoisting in code that keeps deoptimizing).
676 bool HGlobalValueNumberingPhase::AllowCodeMotion() {
677 return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count;
// Per-instruction guard for hoisting: requires code motion to be
// allowed and the instruction's block to be reachable and not an
// unconditionally-deoptimizing block.  (loop_header is unused in the
// visible body — presumably kept for interface symmetry.)
681 bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr,
682 HBasicBlock* loop_header) {
683 // If we've disabled code motion or we're in a block that unconditionally
684 // deoptimizes, don't move any instructions.
685 return AllowCodeMotion() && !instr->block()->IsDeoptimizing() &&
686 instr->block()->IsReachable();
// Recursively unions the side effects of every block lying on a path
// from `dominator` to `dominated` (exclusive, identified by the block-id
// range test), using visited_on_paths_ to visit each block once.  Loop
// headers also contribute their whole loop's effects.  NOTE(review): the
// SideEffects return-type line, the recursive call's second argument,
// the return and closing braces are missing from this listing.
691 HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock(
692 HBasicBlock* dominator, HBasicBlock* dominated) {
693 SideEffects side_effects;
694 for (int i = 0; i < dominated->predecessors()->length(); ++i) {
695 HBasicBlock* block = dominated->predecessors()->at(i);
// Only blocks strictly between dominator and dominated can be on such
// a path, and each is counted once via visited_on_paths_.
696 if (dominator->block_id() < block->block_id() &&
697 block->block_id() < dominated->block_id() &&
698 !visited_on_paths_.Contains(block->block_id())) {
699 visited_on_paths_.Add(block->block_id());
700 side_effects.Add(block_side_effects_[block->block_id()]);
701 if (block->IsLoopHeader()) {
702 side_effects.Add(loop_side_effects_[block->block_id()]);
// Recurse through this predecessor's own predecessors.
704 side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
712 // Each instance of this class is like a "stack frame" for the recursive
713 // traversal of the dominator tree done during GVN (the stack is handled
714 // as a double linked list).
715 // We reuse frames when possible so the list length is limited by the depth
716 // of the dominator tree but this forces us to initialize each frame calling
717 // an explicit "Initialize" method instead of a using constructor.
// NOTE(review): this listing is missing lines throughout the class
// (access specifiers, several returns/braces, and some members such as
// block_/length_).
718 class GvnBasicBlockState: public ZoneObject {
// Builds the root frame for the traversal, owning the entry block's map.
720 static GvnBasicBlockState* CreateEntry(Zone* zone,
721 HBasicBlock* entry_block,
722 HInstructionMap* entry_map) {
724 GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
727 HBasicBlock* block() { return block_; }
728 HInstructionMap* map() { return map_; }
729 HSideEffectMap* dominators() { return &dominators_; }
// Advances the traversal: returns the next frame (a dominated child, or
// a sibling found by popping finished frames) and reports, through
// *dominator, the block that dominates the returned frame's block.
731 GvnBasicBlockState* next_in_dominator_tree_traversal(
733 HBasicBlock** dominator) {
734 // This assignment needs to happen before calling next_dominated() because
735 // that call can reuse "this" if we are at the last dominated block.
736 *dominator = block();
737 GvnBasicBlockState* result = next_dominated(zone);
738 if (result == NULL) {
739 GvnBasicBlockState* dominator_state = pop();
740 if (dominator_state != NULL) {
741 // This branch is guaranteed not to return NULL because pop() never
742 // returns a state where "is_done() == true".
743 *dominator = dominator_state->block();
744 result = dominator_state->next_dominated(zone);
746 // Unnecessary (we are returning NULL) but done for cleanness.
// (Re)initializes a frame for `block`.  copy_map controls whether the
// instruction map is cloned (needed when siblings will also use it).
754 void Initialize(HBasicBlock* block,
755 HInstructionMap* map,
756 HSideEffectMap* dominators,
760 map_ = copy_map ? map->Copy(zone) : map;
761 dominated_index_ = -1;
762 length_ = block->dominated_blocks()->length();
763 if (dominators != NULL) {
764 dominators_ = *dominators;
767 bool is_done() { return dominated_index_ >= length_; }
769 GvnBasicBlockState(GvnBasicBlockState* previous,
771 HInstructionMap* map,
772 HSideEffectMap* dominators,
774 : previous_(previous), next_(NULL) {
775 Initialize(block, map, dominators, true, zone);
// Returns a frame for the next dominated child, reusing *this* frame
// for the last child (so the map need not be copied again).
778 GvnBasicBlockState* next_dominated(Zone* zone) {
780 if (dominated_index_ == length_ - 1) {
781 // No need to copy the map for the last child in the dominator tree.
782 Initialize(block_->dominated_blocks()->at(dominated_index_),
788 } else if (dominated_index_ < length_) {
789 return push(zone, block_->dominated_blocks()->at(dominated_index_));
// Pushes a child frame, reusing next_ when one was allocated before.
795 GvnBasicBlockState* push(Zone* zone, HBasicBlock* block) {
798 new(zone) GvnBasicBlockState(this, block, map(), dominators(), zone);
800 next_->Initialize(block, map(), dominators(), true, zone);
// Pops finished frames off the linked stack, tracing each backtrack.
804 GvnBasicBlockState* pop() {
805 GvnBasicBlockState* result = previous_;
806 while (result != NULL && result->is_done()) {
807 TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
809 previous_->block()->block_id())
810 result = result->previous_;
815 GvnBasicBlockState* previous_;
816 GvnBasicBlockState* next_;
818 HInstructionMap* map_;
819 HSideEffectMap dominators_;
820 int dominated_index_;
825 // This is a recursive traversal of the dominator tree but it has been turned
826 // into a loop to avoid stack overflows.
827 // The logical "stack frames" of the recursion are kept in a list of
828 // GvnBasicBlockState instances.
// NOTE(review): this listing is missing lines in this function (the
// `current = next` advance, some TRACE arguments, Kill call and braces).
829 void HGlobalValueNumberingPhase::AnalyzeGraph() {
830 HBasicBlock* entry_block = graph()->entry_block();
831 HInstructionMap* entry_map =
832 new(zone()) HInstructionMap(zone(), &side_effects_tracker_);
833 GvnBasicBlockState* current =
834 GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);
// Iterative walk of the dominator tree; each frame carries the value
// map and the per-effect dominator map valid on entry to its block.
836 while (current != NULL) {
837 HBasicBlock* block = current->block();
838 HInstructionMap* map = current->map();
839 HSideEffectMap* dominators = current->dominators();
841 TRACE_GVN_2("Analyzing block B%d%s\n",
843 block->IsLoopHeader() ? " (loop header)" : "");
845 // If this is a loop header kill everything killed by the loop.
846 if (block->IsLoopHeader()) {
847 map->Kill(loop_side_effects_[block->block_id()]);
848 dominators->Kill(loop_side_effects_[block->block_id()]);
851 // Go through all instructions of the current block.
852 for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
853 HInstruction* instr = it.Current();
// Side-effect dominator tracking: let instructions observe the
// dominating producer of each effect they depend on.
854 if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
855 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
856 HValue* other = dominators->at(i);
857 GVNFlag flag = GVNFlagFromInt(i);
858 if (instr->DependsOnFlags().Contains(flag) && other != NULL) {
859 TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
865 if (instr->HandleSideEffectDominator(flag, other)) {
866 removed_side_effects_ = true;
871 // Instruction was unlinked during graph traversal.
872 if (!instr->IsLinked()) continue;
874 SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
875 if (!changes.IsEmpty()) {
876 // Clear all instructions in the map that are affected by side effects.
877 // Store instruction as the dominating one for tracked side effects.
879 dominators->Store(changes, instr);
880 if (FLAG_trace_gvn) {
881 HeapStringAllocator allocator;
882 StringStream stream(&allocator);
883 stream.Add("Instruction i%d changes ", instr->id());
884 side_effects_tracker_.PrintSideEffectsTo(&stream, changes);
886 stream.OutputToStdOut();
// Core value numbering: replace instr with an equivalent earlier
// instruction if one is in the map, else record instr for later.
889 if (instr->CheckFlag(HValue::kUseGVN)) {
890 ASSERT(!instr->HasObservableSideEffects());
891 HInstruction* other = map->Lookup(instr);
893 ASSERT(instr->Equals(other) && other->Equals(instr));
894 TRACE_GVN_4("Replacing instruction i%d (%s) with i%d (%s)\n",
899 if (instr->HasSideEffects()) removed_side_effects_ = true;
900 instr->DeleteAndReplaceWith(other);
902 map->Add(instr, zone());
// Advance to the next block in the dominator-tree traversal, then fix
// up the successor's state for effects on paths we skipped over.
907 HBasicBlock* dominator_block;
908 GvnBasicBlockState* next =
909 current->next_in_dominator_tree_traversal(zone(),
913 HBasicBlock* dominated = next->block();
914 HInstructionMap* successor_map = next->map();
915 HSideEffectMap* successor_dominators = next->dominators();
917 // Kill everything killed on any path between this block and the
918 // dominated block. We don't have to traverse these paths if the
919 // value map and the dominators list is already empty. If the range
920 // of block ids (block_id, dominated_id) is empty there are no such
922 if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
923 dominator_block->block_id() + 1 < dominated->block_id()) {
924 visited_on_paths_.Clear();
925 SideEffects side_effects_on_all_paths =
926 CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
928 successor_map->Kill(side_effects_on_all_paths);
929 successor_dominators->Kill(side_effects_on_all_paths);
936 } } // namespace v8::internal