1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 #include "hydrogen-gvn.h"
// A hash map from HValue* to equal HValue*s, used by GVN to find a
// previously-seen equivalent instruction.  array_ holds the first value of
// each hash bucket; colliding values are chained through index-based linked
// lists stored in lists_, terminated by kNil.
// NOTE(review): this declaration is partially elided in this view (access
// specifiers and some closing braces are not visible); code left untouched.
35 class HValueMap: public ZoneObject {
37 explicit HValueMap(Zone* zone)
44 free_list_head_(kNil) {
// Pre-size both the collision store and the primary hash array.
45 ResizeLists(kInitialSize, zone);
46 Resize(kInitialSize, zone);
// Removes every value whose GVN flags depend on any of the given flags.
49 void Kill(GVNFlagSet flags);
// Adds a value and records its flags in present_flags_, so Kill() can
// cheaply skip flag sets that cannot affect anything stored here.
51 void Add(HValue* value, Zone* zone) {
52 present_flags_.Add(value->gvn_flags());
// Returns a stored value equal to |value|, if any (see definition below).
56 HValue* Lookup(HValue* value) const;
// Zone-allocated deep copy, used when the dominator-tree traversal forks.
58 HValueMap* Copy(Zone* zone) const {
59 return new(zone) HValueMap(zone, this);
62 bool IsEmpty() const { return count_ == 0; }
65 // A linked list of HValue* values. Stored in arrays.
66 struct HValueMapListElement {
68 int next; // Index in the array of the next list element.
70 static const int kNil = -1; // The end of a linked list
72 // Must be a power of 2.
73 static const int kInitialSize = 16;
75 HValueMap(Zone* zone, const HValueMap* other);
77 void Resize(int new_size, Zone* zone);
78 void ResizeLists(int new_size, Zone* zone);
79 void Insert(HValue* value, Zone* zone);
// Maps a hash to a bucket index; relies on array_size_ being a power of 2.
80 uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }
84 int count_; // The number of values stored in the HValueMap.
85 GVNFlagSet present_flags_; // All flags that are in any value in the
87 HValueMapListElement* array_; // Primary store - contains the first value
88 // with a given hash. Colliding elements are stored in linked lists.
89 HValueMapListElement* lists_; // The linked lists containing hash collisions.
90 int free_list_head_; // Unused elements in lists_ are on the free list.
// Tracks, for each tracked side-effect kind, a dominating instruction that
// produced that side effect (consumed via HandleSideEffectDominator during
// the GVN graph walk below).
// NOTE(review): parts of this declaration are elided in this view.
94 class HSideEffectMap BASE_EMBEDDED {
97 explicit HSideEffectMap(HSideEffectMap* other);
98 HSideEffectMap& operator= (const HSideEffectMap& other);
// Clears slots for side effects contained in |flags|.
100 void Kill(GVNFlagSet flags);
// Records |instr| for side effects contained in |flags|.
102 void Store(GVNFlagSet flags, HInstruction* instr);
104 bool IsEmpty() const { return count_ == 0; }
106 inline HInstruction* operator[](int i) const {
108 ASSERT(i < kNumberOfTrackedSideEffects);
111 inline HInstruction* at(int i) const { return operator[](i); }
// One slot per tracked side effect; NULL means "no dominating instruction".
115 HInstruction* data_[kNumberOfTrackedSideEffects];
// Printf-style trace helper for --trace-gvn output; forwards the varargs
// to OS::VPrint.
// NOTE(review): the va_list declaration, va_end call and closing brace are
// on lines elided from this view of the file.
119 void TraceGVN(const char* msg, ...) {
121 va_start(arguments, msg);
122 OS::VPrint(msg, arguments);
127 // Wrap TraceGVN in macros to avoid the expense of evaluating its arguments when
128 // --trace-gvn is off.
// NOTE(review): each macro's closing "}" line (and TRACE_GVN_1's call line)
// is elided in this view.  No comments are inserted between the #define
// lines because the trailing backslashes continue each macro body.
129 #define TRACE_GVN_1(msg, a1) \
130 if (FLAG_trace_gvn) { \
134 #define TRACE_GVN_2(msg, a1, a2) \
135 if (FLAG_trace_gvn) { \
136 TraceGVN(msg, a1, a2); \
139 #define TRACE_GVN_3(msg, a1, a2, a3) \
140 if (FLAG_trace_gvn) { \
141 TraceGVN(msg, a1, a2, a3); \
144 #define TRACE_GVN_4(msg, a1, a2, a3, a4) \
145 if (FLAG_trace_gvn) { \
146 TraceGVN(msg, a1, a2, a3, a4); \
149 #define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
150 if (FLAG_trace_gvn) { \
151 TraceGVN(msg, a1, a2, a3, a4, a5); \
// Copying constructor: clones |other| into freshly zone-allocated backing
// arrays.  A bitwise copy of the arrays is sufficient because elements are
// plain {HValue*, int} pairs.
155 HValueMap::HValueMap(Zone* zone, const HValueMap* other)
156 : array_size_(other->array_size_),
157 lists_size_(other->lists_size_),
158 count_(other->count_),
159 present_flags_(other->present_flags_),
160 array_(zone->NewArray<HValueMapListElement>(other->array_size_)),
161 lists_(zone->NewArray<HValueMapListElement>(other->lists_size_)),
162 free_list_head_(other->free_list_head_) {
// NOTE(review): the opening OS::MemCopy( lines for these two argument
// lists are elided in this view.
164 array_, other->array_, array_size_ * sizeof(HValueMapListElement));
166 lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement));
// Removes every stored value whose GVN flags depend on any of the flags
// being killed, and rebuilds present_flags_ from the survivors.  Removed
// collision nodes are recycled onto the free list.
170 void HValueMap::Kill(GVNFlagSet flags) {
171 GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags);
// Fast path: nothing stored depends on any of the killed flags.
172 if (!present_flags_.ContainsAnyOf(depends_flags)) return;
173 present_flags_.RemoveAll();
174 for (int i = 0; i < array_size_; ++i) {
175 HValue* value = array_[i].value;
177 // Clear list of collisions first, so we know if it becomes empty.
178 int kept = kNil; // List of kept elements.
180 for (int current = array_[i].next; current != kNil; current = next) {
181 next = lists_[current].next;
182 HValue* value = lists_[current].value;
183 if (value->gvn_flags().ContainsAnyOf(depends_flags)) {
// Dropped node: recycle it onto the free list.
186 lists_[current].next = free_list_head_;
187 free_list_head_ = current;
// Kept node: prepend to the rebuilt "kept" chain.
190 lists_[current].next = kept;
192 present_flags_.Add(value->gvn_flags());
195 array_[i].next = kept;
197 // Now possibly drop directly indexed element.
198 value = array_[i].value;
199 if (value->gvn_flags().ContainsAnyOf(depends_flags)) { // Drop it.
201 int head = array_[i].next;
// Empty collision chain: the bucket simply becomes empty ...
203 array_[i].value = NULL;
// ... otherwise promote the chain head into the bucket slot and free it.
205 array_[i].value = lists_[head].value;
206 array_[i].next = lists_[head].next;
207 lists_[head].next = free_list_head_;
208 free_list_head_ = head;
211 present_flags_.Add(value->gvn_flags()); // Keep it.
// Returns a stored value equal to |value| (same hash bucket, matching
// HValue::Equals), checking the bucket slot first and then walking its
// collision chain.
// NOTE(review): the final not-found return and closing brace are elided
// from this view.
218 HValue* HValueMap::Lookup(HValue* value) const {
219 uint32_t hash = static_cast<uint32_t>(value->Hashcode());
220 uint32_t pos = Bound(hash);
221 if (array_[pos].value != NULL) {
222 if (array_[pos].value->Equals(value)) return array_[pos].value;
223 int next = array_[pos].next;
224 while (next != kNil) {
225 if (lists_[next].value->Equals(value)) return lists_[next].value;
226 next = lists_[next].next;
// Grows the primary hash array to |new_size| and rehashes every element via
// Insert(); existing collision nodes are unlinked and recycled through the
// free list as they are re-inserted.
233 void HValueMap::Resize(int new_size, Zone* zone) {
234 ASSERT(new_size > count_);
235 // Hashing the values into the new array has no more collisions than in the
236 // old hash map, so we can use the existing lists_ array, if we are careful.
238 // Make sure we have at least one free element.
239 if (free_list_head_ == kNil) {
240 ResizeLists(lists_size_ << 1, zone);
243 HValueMapListElement* new_array =
244 zone->NewArray<HValueMapListElement>(new_size);
245 memset(new_array, 0, sizeof(HValueMapListElement) * new_size);
247 HValueMapListElement* old_array = array_;
248 int old_size = array_size_;
250 int old_count = count_;
252 // Do not modify present_flags_. It is currently correct.
253 array_size_ = new_size;
256 if (old_array != NULL) {
257 // Iterate over all the elements in lists, rehashing them.
258 for (int i = 0; i < old_size; ++i) {
259 if (old_array[i].value != NULL) {
260 int current = old_array[i].next;
261 while (current != kNil) {
// Re-insert the chained value into the new array, then recycle its node.
262 Insert(lists_[current].value, zone);
263 int next = lists_[current].next;
264 lists_[current].next = free_list_head_;
265 free_list_head_ = current;
268 // Rehash the directly stored value.
269 Insert(old_array[i].value, zone);
// Rehashing must preserve the element count exactly.
274 ASSERT(count_ == old_count);
// Grows the collision-node store to |new_size|, bitwise-copying the old
// nodes and threading each newly created slot onto the free list.
278 void HValueMap::ResizeLists(int new_size, Zone* zone) {
279 ASSERT(new_size > lists_size_);
281 HValueMapListElement* new_lists =
282 zone->NewArray<HValueMapListElement>(new_size);
283 memset(new_lists, 0, sizeof(HValueMapListElement) * new_size);
285 HValueMapListElement* old_lists = lists_;
286 int old_size = lists_size_;
288 lists_size_ = new_size;
291 if (old_lists != NULL) {
292 OS::MemCopy(lists_, old_lists, old_size * sizeof(HValueMapListElement));
// Push each brand-new slot onto the free list.
294 for (int i = old_size; i < lists_size_; ++i) {
295 lists_[i].next = free_list_head_;
// Inserts |value| without checking for duplicates (callers Lookup() first).
// Keeps the table at most half full so bucket chains stay short.
301 void HValueMap::Insert(HValue* value, Zone* zone) {
302 ASSERT(value != NULL);
303 // Resizing when half of the hashtable is filled up.
304 if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
305 ASSERT(count_ < array_size_);
307 uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
308 if (array_[pos].value == NULL) {
// Empty bucket: store directly, no collision chain needed.
309 array_[pos].value = value;
310 array_[pos].next = kNil;
// Occupied bucket: take a node from the free list and prepend it to the
// bucket's collision chain.
312 if (free_list_head_ == kNil) {
313 ResizeLists(lists_size_ << 1, zone);
315 int new_element_pos = free_list_head_;
316 ASSERT(new_element_pos != kNil);
317 free_list_head_ = lists_[free_list_head_].next;
318 lists_[new_element_pos].value = value;
319 lists_[new_element_pos].next = array_[pos].next;
320 ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL);
321 array_[pos].next = new_element_pos;
// Starts empty: clears all kNumberOfTrackedSideEffects pointer slots in
// data_ (hence the kPointerSize-based byte count) with count_ at zero.
326 HSideEffectMap::HSideEffectMap() : count_(0) {
327 memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
// Copy construction delegates to operator= so both paths stay in sync.
331 HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
332 *this = *other; // Calls operator=.
// Bitwise copy of the dominator pointer slots; self-assignment is a no-op.
336 HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) {
337 if (this != &other) {
338 OS::MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
// Clears the dominator slot for every tracked side effect present in
// |flags|, keeping count_ equal to the number of non-NULL slots.
// NOTE(review): the line that actually NULLs data_[i] appears to be elided
// from this view — confirm against the full source.
344 void HSideEffectMap::Kill(GVNFlagSet flags) {
345 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
346 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
347 if (flags.Contains(changes_flag)) {
348 if (data_[i] != NULL) count_--;
// Records |instr| as the dominating instruction for every tracked side
// effect present in |flags|, keeping count_ equal to the occupied slots.
// NOTE(review): the line that stores instr into data_[i] appears to be
// elided from this view — confirm against the full source.
355 void HSideEffectMap::Store(GVNFlagSet flags, HInstruction* instr) {
356 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
357 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
358 if (flags.Contains(changes_flag)) {
359 if (data_[i] == NULL) count_++;
// Sizes the per-block and per-loop side-effect sets (and the path-visited
// bitset) to the graph's block count.  The ASSERT enforces that this phase
// performs no handle allocation.
366 HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph)
367 : HPhase("H_Global value numbering", graph),
368 removed_side_effects_(false),
369 block_side_effects_(graph->blocks()->length(), zone()),
370 loop_side_effects_(graph->blocks()->length(), zone()),
371 visited_on_paths_(graph->blocks()->length(), zone()) {
372 ASSERT(!AllowHandleAllocation::IsAllowed());
// Pre-fill both lists with empty flag sets, one entry per basic block.
373 block_side_effects_.AddBlock(GVNFlagSet(), graph->blocks()->length(),
375 loop_side_effects_.AddBlock(GVNFlagSet(), graph->blocks()->length(),
// Entry point for one GVN pass: recomputes side effects, then optionally
// runs loop-invariant code motion (behind FLAG_loop_invariant_code_motion).
// NOTE(review): the call into the main graph analysis is on lines elided
// from this view.
379 void HGlobalValueNumberingPhase::Analyze() {
380 removed_side_effects_ = false;
381 ComputeBlockSideEffects();
382 if (FLAG_loop_invariant_code_motion) {
383 LoopInvariantCodeMotion();
// For every reachable, non-deoptimizing block, unions the GVN "changes"
// flags of its instructions into block_side_effects_, and accumulates each
// block's effects into its enclosing loop header(s).  Blocks are walked in
// reverse id order so inner-loop totals are complete before being folded
// into outer loops.
389 void HGlobalValueNumberingPhase::ComputeBlockSideEffects() {
390 // The Analyze phase of GVN can be called multiple times. Clear loop side
391 // effects before computing them to erase the contents from previous Analyze
393 for (int i = 0; i < loop_side_effects_.length(); ++i) {
394 loop_side_effects_[i].RemoveAll();
396 for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
397 // Compute side effects for the block.
398 HBasicBlock* block = graph()->blocks()->at(i);
399 GVNFlagSet side_effects;
400 if (block->IsReachable() && !block->IsDeoptimizing()) {
401 int id = block->block_id();
402 for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
403 HInstruction* instr = it.Current();
404 side_effects.Add(instr->ChangesFlags());
406 block_side_effects_[id].Add(side_effects);
408 // Loop headers are part of their loop.
409 if (block->IsLoopHeader()) {
410 loop_side_effects_[id].Add(side_effects);
413 // Propagate loop side effects upwards.
414 if (block->HasParentLoopHeader()) {
415 int header_id = block->parent_loop_header()->block_id();
// A nested loop header contributes its whole loop's effects to its parent.
416 loop_side_effects_[header_id].Add(block->IsLoopHeader()
417 ? loop_side_effects_[id]
// Renders a GVNFlagSet as human-readable text for tracing, e.g.
// "changes [A, B], depends on all except [C]".  When more than half of the
// flags are set the complement form is printed to keep the string short.
// Returns a heap copy owned by the returned SmartArrayPointer.
425 SmartArrayPointer<char> GetGVNFlagsString(GVNFlagSet flags) {
426 char underlying_buffer[kLastFlag * 128];
427 Vector<char> buffer(underlying_buffer, sizeof(underlying_buffer));
430 const char* separator = "";
431 const char* comma = ", ";
// First pass: count how many changes / depends-on bits are set.
433 uint32_t set_depends_on = 0;
434 uint32_t set_changes = 0;
435 for (int bit = 0; bit < kLastFlag; ++bit) {
436 if ((flags.ToIntegral() & (1 << bit)) != 0) {
// Print the positive list when few bits are set, the complement otherwise.
444 bool positive_changes = set_changes < (kLastFlag / 2);
445 bool positive_depends_on = set_depends_on < (kLastFlag / 2);
446 if (set_changes > 0) {
447 if (positive_changes) {
448 offset += OS::SNPrintF(buffer + offset, "changes [");
450 offset += OS::SNPrintF(buffer + offset, "changes all except [");
452 for (int bit = 0; bit < kLastFlag; ++bit) {
453 if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_changes) {
454 switch (static_cast<GVNFlag>(bit)) {
455 #define DECLARE_FLAG(type) \
456 case kChanges##type: \
457 offset += OS::SNPrintF(buffer + offset, separator); \
458 offset += OS::SNPrintF(buffer + offset, #type); \
461 GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
462 GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
469 offset += OS::SNPrintF(buffer + offset, "]");
// Second section: the depends-on flags, mirroring the changes section.
471 if (set_depends_on > 0) {
473 if (set_changes > 0) {
474 offset += OS::SNPrintF(buffer + offset, ", ");
476 if (positive_depends_on) {
477 offset += OS::SNPrintF(buffer + offset, "depends on [");
479 offset += OS::SNPrintF(buffer + offset, "depends on all except [");
481 for (int bit = 0; bit < kLastFlag; ++bit) {
482 if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_depends_on) {
483 switch (static_cast<GVNFlag>(bit)) {
484 #define DECLARE_FLAG(type) \
485 case kDependsOn##type: \
486 offset += OS::SNPrintF(buffer + offset, separator); \
487 offset += OS::SNPrintF(buffer + offset, #type); \
490 GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
491 GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
498 offset += OS::SNPrintF(buffer + offset, "]");
// Raw hex fallback — presumably guarded by an elided condition (e.g. when
// nothing readable was printed); confirm against the full source.
501 OS::SNPrintF(buffer, "0x%08X", flags.ToIntegral());
// Heap-copy the assembled buffer (including the NUL terminator).
503 size_t string_len = strlen(underlying_buffer) + 1;
504 ASSERT(string_len <= sizeof(underlying_buffer));
505 char* result = new char[strlen(underlying_buffer) + 1];
506 OS::MemCopy(result, underlying_buffer, string_len);
507 return SmartArrayPointer<char>(result);
// For each loop header (scanning blocks in reverse), attempts to hoist
// invariant instructions out of the loop, whose body spans the block-id
// range from the header to its last back edge.
511 void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() {
512 TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
513 graph()->use_optimistic_licm() ? "yes" : "no")
// Scans each instruction of |block| and hoists it to just before the loop
// pre-header's end when it is GVN-able, does not depend on anything the
// loop may change (|loop_kills|), and all of its operands are defined
// before the pre-header.  Unhoisted instructions feed the first-time
// depends/changes accumulators consulted by later hoisting decisions.
535 void HGlobalValueNumberingPhase::ProcessLoopBlock(
537 HBasicBlock* loop_header,
538 GVNFlagSet loop_kills,
539 GVNFlagSet* first_time_depends,
540 GVNFlagSet* first_time_changes) {
541 HBasicBlock* pre_header = loop_header->predecessors()->at(0);
542 GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
543 TRACE_GVN_2("Loop invariant motion for B%d %s\n",
545 *GetGVNFlagsString(depends_flags));
546 HInstruction* instr = block->first();
547 while (instr != NULL) {
// Capture the successor before hoisting may relink instr.
548 HInstruction* next = instr->next();
549 bool hoisted = false;
550 if (instr->CheckFlag(HValue::kUseGVN)) {
551 TRACE_GVN_4("Checking instruction %d (%s) %s. Loop %s\n",
554 *GetGVNFlagsString(instr->gvn_flags()),
555 *GetGVNFlagsString(loop_kills));
556 bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
557 if (can_hoist && !graph()->use_optimistic_licm()) {
// Conservative mode: only hoist from blocks dominating the loop successor.
558 can_hoist = block->IsLoopSuccessorDominator();
// An instruction is only movable if every operand is available before
// the loop, i.e. not defined after the pre-header.
562 bool inputs_loop_invariant = true;
563 for (int i = 0; i < instr->OperandCount(); ++i) {
564 if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
565 inputs_loop_invariant = false;
569 if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
570 TRACE_GVN_1("Hoisting loop invariant instruction %d\n", instr->id());
571 // Move the instruction out of the loop.
573 instr->InsertBefore(pre_header->end());
574 if (instr->HasSideEffects()) removed_side_effects_ = true;
580 // If an instruction is not hoisted, we have to account for its side
581 // effects when hoisting later HTransitionElementsKind instructions.
582 GVNFlagSet previous_depends = *first_time_depends;
583 GVNFlagSet previous_changes = *first_time_changes;
584 first_time_depends->Add(instr->DependsOnFlags());
585 first_time_changes->Add(instr->ChangesFlags());
// Snapshot comparison: only trace when the accumulators actually grew.
586 if (!(previous_depends == *first_time_depends)) {
587 TRACE_GVN_1("Updated first-time accumulated %s\n",
588 *GetGVNFlagsString(*first_time_depends));
590 if (!(previous_changes == *first_time_changes)) {
591 TRACE_GVN_1("Updated first-time accumulated %s\n",
592 *GetGVNFlagsString(*first_time_changes));
// Code motion is always allowed for stubs; for regular functions only while
// the optimization count leaves headroom below FLAG_max_opt_count
// (presumably so a deopt after motion still has a retry budget — confirm).
600 bool HGlobalValueNumberingPhase::AllowCodeMotion() {
601 return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count;
// Per-instruction hoisting policy: true when moving |instr| out of the
// loop is permitted.
605 bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr,
606 HBasicBlock* loop_header) {
607 // If we've disabled code motion or we're in a block that unconditionally
608 // deoptimizes, don't move any instructions.
609 return AllowCodeMotion() && !instr->block()->IsDeoptimizing() &&
610 instr->block()->IsReachable();
// Unions the side effects of every block lying strictly between |dominator|
// and |dominated| (by block id), recursing through predecessors.  Blocks
// already visited (visited_on_paths_) are skipped so shared path segments
// are only counted once; the caller clears that set before the first call.
// NOTE(review): the return-type line and tail of this function are elided
// from this view.
615 HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock(
616 HBasicBlock* dominator, HBasicBlock* dominated) {
617 GVNFlagSet side_effects;
618 for (int i = 0; i < dominated->predecessors()->length(); ++i) {
619 HBasicBlock* block = dominated->predecessors()->at(i);
620 if (dominator->block_id() < block->block_id() &&
621 block->block_id() < dominated->block_id() &&
622 !visited_on_paths_.Contains(block->block_id())) {
623 visited_on_paths_.Add(block->block_id());
624 side_effects.Add(block_side_effects_[block->block_id()]);
// Loop headers also carry their whole loop's accumulated effects.
625 if (block->IsLoopHeader()) {
626 side_effects.Add(loop_side_effects_[block->block_id()]);
628 side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
636 // Each instance of this class is like a "stack frame" for the recursive
637 // traversal of the dominator tree done during GVN (the stack is handled
638 // as a double linked list).
639 // We reuse frames when possible so the list length is limited by the depth
640 // of the dominator tree but this forces us to initialize each frame calling
641 // an explicit "Initialize" method instead of a using constructor.
642 class GvnBasicBlockState: public ZoneObject {
// Builds the root frame for the traversal starting at |entry_block|.
644 static GvnBasicBlockState* CreateEntry(Zone* zone,
645 HBasicBlock* entry_block,
646 HValueMap* entry_map) {
648 GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
651 HBasicBlock* block() { return block_; }
652 HValueMap* map() { return map_; }
653 HSideEffectMap* dominators() { return &dominators_; }
// Advances the traversal: returns the next state to process (a child of
// this block, or of an unfinished ancestor after popping), reporting that
// state's immediate dominator through |dominator|.
655 GvnBasicBlockState* next_in_dominator_tree_traversal(
657 HBasicBlock** dominator) {
658 // This assignment needs to happen before calling next_dominated() because
659 // that call can reuse "this" if we are at the last dominated block.
660 *dominator = block();
661 GvnBasicBlockState* result = next_dominated(zone);
662 if (result == NULL) {
663 GvnBasicBlockState* dominator_state = pop();
664 if (dominator_state != NULL) {
665 // This branch is guaranteed not to return NULL because pop() never
666 // returns a state where "is_done() == true".
667 *dominator = dominator_state->block();
668 result = dominator_state->next_dominated(zone);
670 // Unnecessary (we are returning NULL) but done for cleanness.
// (Re)initializes this frame for |block|; copies the value map only when
// |copy_map| is set (the last child can take over its parent's map).
678 void Initialize(HBasicBlock* block,
680 HSideEffectMap* dominators,
684 map_ = copy_map ? map->Copy(zone) : map;
685 dominated_index_ = -1;
686 length_ = block->dominated_blocks()->length();
687 if (dominators != NULL) {
688 dominators_ = *dominators;
// A frame is finished once every dominated block has been handed out.
691 bool is_done() { return dominated_index_ >= length_; }
693 GvnBasicBlockState(GvnBasicBlockState* previous,
696 HSideEffectMap* dominators,
698 : previous_(previous), next_(NULL) {
699 Initialize(block, map, dominators, true, zone);
// Returns a frame for the next dominated block: reuses "this" for the last
// child (no map copy needed), pushes a fresh frame otherwise.
702 GvnBasicBlockState* next_dominated(Zone* zone) {
704 if (dominated_index_ == length_ - 1) {
705 // No need to copy the map for the last child in the dominator tree.
706 Initialize(block_->dominated_blocks()->at(dominated_index_),
712 } else if (dominated_index_ < length_) {
713 return push(zone, block_->dominated_blocks()->at(dominated_index_));
// Appends a frame for |block| after this one, allocating only when no
// reusable next_ frame exists yet.
719 GvnBasicBlockState* push(Zone* zone, HBasicBlock* block) {
722 new(zone) GvnBasicBlockState(this, block, map(), dominators(), zone);
724 next_->Initialize(block, map(), dominators(), true, zone);
// Walks back to the closest unfinished ancestor frame (NULL at the root).
728 GvnBasicBlockState* pop() {
729 GvnBasicBlockState* result = previous_;
730 while (result != NULL && result->is_done()) {
731 TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
733 previous_->block()->block_id())
734 result = result->previous_;
739 GvnBasicBlockState* previous_;
740 GvnBasicBlockState* next_;
743 HSideEffectMap dominators_;
744 int dominated_index_;
749 // This is a recursive traversal of the dominator tree but it has been turned
750 // into a loop to avoid stack overflows.
751 // The logical "stack frames" of the recursion are kept in a list of
752 // GvnBasicBlockState instances.
753 void HGlobalValueNumberingPhase::AnalyzeGraph() {
754 HBasicBlock* entry_block = graph()->entry_block();
755 HValueMap* entry_map = new(zone()) HValueMap(zone());
756 GvnBasicBlockState* current =
757 GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);
759 while (current != NULL) {
760 HBasicBlock* block = current->block();
761 HValueMap* map = current->map();
762 HSideEffectMap* dominators = current->dominators();
764 TRACE_GVN_2("Analyzing block B%d%s\n",
766 block->IsLoopHeader() ? " (loop header)" : "");
768 // If this is a loop header kill everything killed by the loop.
769 if (block->IsLoopHeader()) {
770 map->Kill(loop_side_effects_[block->block_id()]);
771 dominators->Kill(loop_side_effects_[block->block_id()]);
774 // Go through all instructions of the current block.
775 for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
776 HInstruction* instr = it.Current();
// Notify instructions that track which instruction dominates each of
// their relevant side effects.
777 if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
778 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
779 HValue* other = dominators->at(i);
780 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
781 GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
782 if (instr->DependsOnFlags().Contains(depends_on_flag) &&
784 TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
790 instr->HandleSideEffectDominator(changes_flag, other);
794 // Instruction was unlinked during graph traversal.
795 if (!instr->IsLinked()) continue;
797 GVNFlagSet flags = instr->ChangesFlags();
798 if (!flags.IsEmpty()) {
799 // Clear all instructions in the map that are affected by side effects.
800 // Store instruction as the dominating one for tracked side effects.
802 dominators->Store(flags, instr);
803 TRACE_GVN_2("Instruction %d %s\n", instr->id(),
804 *GetGVNFlagsString(flags));
// The actual value numbering: replace instr with an equivalent earlier
// value when one exists, otherwise remember it for later lookups.
806 if (instr->CheckFlag(HValue::kUseGVN)) {
807 ASSERT(!instr->HasObservableSideEffects());
808 HValue* other = map->Lookup(instr);
810 ASSERT(instr->Equals(other) && other->Equals(instr));
811 TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
816 if (instr->HasSideEffects()) removed_side_effects_ = true;
817 instr->DeleteAndReplaceWith(other);
819 map->Add(instr, zone());
// Advance to the next block in the dominator-tree traversal; its state
// carries forward copies of the value map and dominator map.
824 HBasicBlock* dominator_block;
825 GvnBasicBlockState* next =
826 current->next_in_dominator_tree_traversal(zone(),
830 HBasicBlock* dominated = next->block();
831 HValueMap* successor_map = next->map();
832 HSideEffectMap* successor_dominators = next->dominators();
834 // Kill everything killed on any path between this block and the
835 // dominated block. We don't have to traverse these paths if the
836 // value map and the dominators list is already empty. If the range
837 // of block ids (block_id, dominated_id) is empty there are no such
839 if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
840 dominator_block->block_id() + 1 < dominated->block_id()) {
841 visited_on_paths_.Clear();
842 GVNFlagSet side_effects_on_all_paths =
843 CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
845 successor_map->Kill(side_effects_on_all_paths);
846 successor_dominators->Kill(side_effects_on_all_paths);
853 } } // namespace v8::internal