{ MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE);
if (!maybe_info->To(&info)) return maybe_info;
}
- info->set_ic_total_count(0);
- info->set_ic_with_type_info_count(0);
+ info->initialize_storage();
info->set_type_feedback_cells(TypeFeedbackCells::cast(empty_fixed_array()),
SKIP_WRITE_BARRIER);
return info;
uint32_instructions_(NULL),
info_(info),
zone_(info->zone()),
- is_recursive_(false) {
+ is_recursive_(false),
+ use_optimistic_licm_(false),
+ type_change_checksum_(0) {
start_environment_ =
new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
start_environment_->set_ast_id(BailoutId::FunctionEntry());
void HGlobalValueNumberer::LoopInvariantCodeMotion() {
+ TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
+ graph_->use_optimistic_licm() ? "yes" : "no");
for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
HBasicBlock* block = graph_->blocks()->at(i);
if (block->IsLoopHeader()) {
*GetGVNFlagsString(instr->gvn_flags()),
*GetGVNFlagsString(loop_kills));
bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
+ if (can_hoist && !graph()->use_optimistic_licm()) {
+ can_hoist = block->IsLoopSuccessorDominator();
+ }
if (instr->IsTransitionElementsKind()) {
// It's possible to hoist transitions out of a loop as long as the
// hoisting wouldn't move the transition past an instruction that has a
current_block()->FinishExit(instr);
set_current_block(NULL);
}
+
+ // If the checksum of the number of type info changes is the same as the
+ // last time this function was compiled, then this recompile is likely not
+ // due to missing/inadequate type feedback, but rather too aggressive
+ // optimization. Disable optimistic LICM in that case.
+ Handle<Code> unoptimized_code(info()->shared_info()->code());
+ ASSERT(unoptimized_code->kind() == Code::FUNCTION);
+ Handle<Object> maybe_type_info(unoptimized_code->type_feedback_info());
+ Handle<TypeFeedbackInfo> type_info(
+ Handle<TypeFeedbackInfo>::cast(maybe_type_info));
+ int checksum = type_info->own_type_change_checksum();
+ int composite_checksum = graph()->update_type_change_checksum(checksum);
+ graph()->set_use_optimistic_licm(
+ !type_info->matches_inlined_type_change_checksum(composite_checksum));
+ type_info->set_inlined_type_change_checksum(composite_checksum);
}
return graph();
// Save the pending call context and type feedback oracle. Set up new ones
// for the inlined function.
ASSERT(target_shared->has_deoptimization_support());
+ Handle<Code> unoptimized_code(target_shared->code());
TypeFeedbackOracle target_oracle(
- Handle<Code>(target_shared->code()),
+ unoptimized_code,
Handle<Context>(target->context()->native_context()),
isolate(),
zone());
// Update inlined nodes count.
inlined_count_ += nodes_added;
+ ASSERT(unoptimized_code->kind() == Code::FUNCTION);
+ Handle<Object> maybe_type_info(unoptimized_code->type_feedback_info());
+ Handle<TypeFeedbackInfo> type_info(
+ Handle<TypeFeedbackInfo>::cast(maybe_type_info));
+ graph()->update_type_change_checksum(type_info->own_type_change_checksum());
+
TraceInline(target, caller, NULL);
if (current_block() != NULL) {
osr_values_.set(values);
}
+ // Folds |delta| into the graph's composite type-change checksum and
+ // returns the running total. Called once for the outer function and once
+ // per inlined function (see the TryInline/OptimizeGraph hunks), so the
+ // composite value summarizes all unoptimized code feeding this graph.
+ int update_type_change_checksum(int delta) {
+ type_change_checksum_ += delta;
+ return type_change_checksum_;
+ }
+
+ // Whether GVN may hoist loop-invariant instructions optimistically
+ // (i.e. without requiring the block to dominate all loop successors).
+ bool use_optimistic_licm() {
+ return use_optimistic_licm_;
+ }
+
+ // Set by OptimizeGraph: cleared when the type-change checksum matches the
+ // previous compile, indicating the last deopt was not due to missing type
+ // feedback and optimistic hoisting should be disabled.
+ void set_use_optimistic_licm(bool value) {
+ use_optimistic_licm_ = value;
+ }
+
void MarkRecursive() {
is_recursive_ = true;
}
Zone* zone_;
bool is_recursive_;
+ bool use_optimistic_licm_;
+ int type_change_checksum_;
DISALLOW_COPY_AND_ASSIGN(HGraph);
};
int delta = ComputeTypeInfoCountDelta(old_target->ic_state(),
target->ic_state());
// Not all Code objects have TypeFeedbackInfo.
- if (delta != 0 && host->type_feedback_info()->IsTypeFeedbackInfo()) {
+ if (host->type_feedback_info()->IsTypeFeedbackInfo() && delta != 0) {
TypeFeedbackInfo* info =
TypeFeedbackInfo::cast(host->type_feedback_info());
- info->set_ic_with_type_info_count(
- info->ic_with_type_info_count() + delta);
+ info->change_ic_with_type_info_count(delta);
}
}
+ if (host->type_feedback_info()->IsTypeFeedbackInfo()) {
+ TypeFeedbackInfo* info =
+ TypeFeedbackInfo::cast(host->type_feedback_info());
+ info->change_own_type_change_checksum();
+ }
if (FLAG_watch_ic_patching) {
host->set_profiler_ticks(0);
Isolate::Current()->runtime_profiler()->NotifyICChanged();
void TypeFeedbackInfo::TypeFeedbackInfoVerify() {
- VerifyObjectField(kIcTotalCountOffset);
- VerifyObjectField(kIcWithTypeinfoCountOffset);
+ // The two per-counter Smi fields were replaced by two packed storage
+ // words (counter + checksum bit fields); verify those instead.
+ VerifyObjectField(kStorage1Offset);
+ VerifyObjectField(kStorage2Offset);
VerifyHeapPointer(type_feedback_cells());
}
}
-SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
-SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
- kIcWithTypeinfoCountOffset)
+// Returns the total IC count, unpacked from the low bits of storage word 1
+// (which it shares with OwnTypeChangeChecksum in the high bits).
+int TypeFeedbackInfo::ic_total_count() {
+ int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
+ return ICTotalCountField::decode(current);
+}
+
+
+// Stores |count| into the ICTotalCountField bits of storage word 1,
+// preserving the checksum bits packed alongside it. Note |count| is first
+// passed through decode(), i.e. masked to the field width, so values wider
+// than the field wrap rather than corrupt neighboring bits.
+void TypeFeedbackInfo::set_ic_total_count(int count) {
+ int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
+ value = ICTotalCountField::update(value,
+ ICTotalCountField::decode(count));
+ WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
+}
+
+
+// Returns the count of ICs with type info, unpacked from the low bits of
+// storage word 2 (shared with InlinedTypeChangeChecksum in the high bits).
+int TypeFeedbackInfo::ic_with_type_info_count() {
+ int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
+ return ICsWithTypeInfoCountField::decode(current);
+}
+
+
+// Adjusts the with-type-info counter by |delta| (may be negative; see the
+// ComputeTypeInfoCountDelta call site in the IC patching hunk), rewriting
+// only that bit field of storage word 2. Replaces the old read-modify-write
+// pair set_ic_with_type_info_count(ic_with_type_info_count() + delta).
+void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
+ int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
+ int current_count = ICsWithTypeInfoCountField::decode(value);
+ value =
+ ICsWithTypeInfoCountField::update(value, current_count + delta);
+ WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
+}
+
+
+// Zeroes both packed storage words (all counters and checksums). Replaces
+// the separate set_ic_total_count(0)/set_ic_with_type_info_count(0) calls
+// previously made by the allocation path in AllocateTypeFeedbackInfo.
+void TypeFeedbackInfo::initialize_storage() {
+ WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
+ WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
+}
+
+
+// Increments the per-function type-change counter, wrapping modulo
+// 2^kTypeChangeChecksumBits, in the high bits of storage word 1. Called
+// whenever an IC is patched (see the IC::PostPatching hunk).
+void TypeFeedbackInfo::change_own_type_change_checksum() {
+ int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
+ int checksum = OwnTypeChangeChecksum::decode(value);
+ checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
+ value = OwnTypeChangeChecksum::update(value, checksum);
+ // Ensure packed bit field is in Smi range.
+ // NOTE(review): the |=/&= of Smi::kMinValue looks like sign-extension
+ // handling for the topmost packed bit on 31-bit-Smi targets — confirm.
+ if (value > Smi::kMaxValue) value |= Smi::kMinValue;
+ if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
+ WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
+}
+
+
+// Records the composite (inlining-aware) checksum computed by the last
+// optimizing compile, masked to kTypeChangeChecksumBits, in the high bits
+// of storage word 2.
+void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
+ int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
+ int mask = (1 << kTypeChangeChecksumBits) - 1;
+ value = InlinedTypeChangeChecksum::update(value, checksum & mask);
+ // Ensure packed bit field is in Smi range.
+ // NOTE(review): same topmost-bit / Smi sign handling as in
+ // change_own_type_change_checksum — confirm semantics there.
+ if (value > Smi::kMaxValue) value |= Smi::kMinValue;
+ if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
+ WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
+}
+
+
+// Returns this function's own type-change counter from storage word 1.
+int TypeFeedbackInfo::own_type_change_checksum() {
+ int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
+ return OwnTypeChangeChecksum::decode(value);
+}
+
+
+// True iff |checksum| (masked to kTypeChangeChecksumBits) equals the
+// composite checksum recorded by the previous optimizing compile — i.e. no
+// new type feedback arrived since then. OptimizeGraph uses a match to
+// disable optimistic LICM.
+bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
+ int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
+ int mask = (1 << kTypeChangeChecksumBits) - 1;
+ return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
+}
+
+
ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
kTypeFeedbackCellsOffset)
inline void set_ic_total_count(int count);
inline int ic_with_type_info_count();
- inline void set_ic_with_type_info_count(int count);
+ inline void change_ic_with_type_info_count(int count);
+
+ inline void initialize_storage();
+
+ inline void change_own_type_change_checksum();
+ inline int own_type_change_checksum();
+
+ inline void set_inlined_type_change_checksum(int checksum);
+ inline bool matches_inlined_type_change_checksum(int checksum);
DECL_ACCESSORS(type_feedback_cells, TypeFeedbackCells)
void TypeFeedbackInfoVerify();
#endif
- static const int kIcTotalCountOffset = HeapObject::kHeaderSize;
- static const int kIcWithTypeinfoCountOffset =
- kIcTotalCountOffset + kPointerSize;
- static const int kTypeFeedbackCellsOffset =
- kIcWithTypeinfoCountOffset + kPointerSize;
+ static const int kStorage1Offset = HeapObject::kHeaderSize;
+ static const int kStorage2Offset = kStorage1Offset + kPointerSize;
+ static const int kTypeFeedbackCellsOffset = kStorage2Offset + kPointerSize;
static const int kSize = kTypeFeedbackCellsOffset + kPointerSize;
private:
+ static const int kTypeChangeChecksumBits = 7;
+
+ class ICTotalCountField: public BitField<int, 0,
+ kSmiValueSize - kTypeChangeChecksumBits> {}; // NOLINT
+ class OwnTypeChangeChecksum: public BitField<int,
+ kSmiValueSize - kTypeChangeChecksumBits,
+ kTypeChangeChecksumBits> {}; // NOLINT
+ class ICsWithTypeInfoCountField: public BitField<int, 0,
+ kSmiValueSize - kTypeChangeChecksumBits> {}; // NOLINT
+ class InlinedTypeChangeChecksum: public BitField<int,
+ kSmiValueSize - kTypeChangeChecksumBits,
+ kTypeChangeChecksumBits> {}; // NOLINT
+
DISALLOW_IMPLICIT_CONSTRUCTORS(TypeFeedbackInfo);
};
--- /dev/null
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// The original problem from the bug: In the example below SMI check for b
+// generated for inlining of equals invocation (marked with (*)) will be hoisted
+// out of the loop across the typeof b === "object" condition and cause an
+// immediate deopt. Another problem here is that no matter how many times we
+// deopt and reopt we will continue to produce the wrong code.
+//
+// The fix is to notice when a deopt and subsequent reopt doesn't find
+// additional type information, indicating that optimistic LICM should be
+// disabled during compilation.
+
+// Equality helper from the bug report: when the (*) call is inlined, the
+// SMI check generated for |b| could be hoisted above the typeof guard by
+// optimistic LICM, deopting immediately (see file header comment).
+function eq(a, b) {
+ if (typeof b === "object") {
+ return b.equals(a); // (*)
+ }
+ return a === b;
+}
+
+// Monomorphic equals target so the (*) call site in eq can be inlined.
+Object.prototype.equals = function (other) {
+ return (this === other);
+};
+
+// Loops eq over SMI values; inside the optimized loop is where the
+// hoisted check would trigger the deopt described in the header comment.
+function test() {
+ for (var i = 0; !eq(i, 10); i++)
+ ;
+}
+
+// Exercise eq with both object and SMI arguments to seed type feedback
+// for both branches before optimizing.
+eq({}, {});
+eq({}, {});
+eq(1, 1);
+eq(1, 1);
+test();
+%OptimizeFunctionOnNextCall(test);
+test();
+%OptimizeFunctionOnNextCall(test);
+// Second compilation should have noticed that LICM wasn't a good idea, and now
+// function should no longer deopt when called.
+test();
+// 2 is presumably the "not optimized" status code from
+// %GetOptimizationStatus — verify against the mjsunit harness constants.
+assertTrue(2 != %GetOptimizationStatus(test));
+