Profiler experiments: self-optimization for functions containing calls
author: jkummerow@chromium.org <jkummerow@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Wed, 22 Feb 2012 10:48:58 +0000 (10:48 +0000)
committer: jkummerow@chromium.org <jkummerow@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Wed, 22 Feb 2012 10:48:58 +0000 (10:48 +0000)
Review URL: https://chromiumcodereview.appspot.com/9430042

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10787 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

src/arm/full-codegen-arm.cc
src/ast.cc
src/ia32/full-codegen-ia32.cc
src/mips/full-codegen-mips.cc
src/x64/full-codegen-x64.cc

index 8ba9d38..6372738 100644 (file)
@@ -140,7 +140,7 @@ void FullCodeGenerator::Generate() {
   // We can optionally optimize based on counters rather than statistical
   // sampling.
   if (info->ShouldSelfOptimize()) {
-    if (FLAG_trace_opt) {
+    if (FLAG_trace_opt_verbose) {
       PrintF("[adding self-optimization header to %s]\n",
              *info->function()->debug_name()->ToCString());
     }
index 7e886fa..c98aaa9 100644 (file)
@@ -1018,6 +1018,8 @@ INCREASE_NODE_COUNT(CountOperation)
 INCREASE_NODE_COUNT(BinaryOperation)
 INCREASE_NODE_COUNT(CompareOperation)
 INCREASE_NODE_COUNT(ThisFunction)
+INCREASE_NODE_COUNT(Call)
+INCREASE_NODE_COUNT(CallNew)
 
 #undef INCREASE_NODE_COUNT
 
@@ -1119,21 +1121,8 @@ void AstConstructionVisitor::VisitArrayLiteral(ArrayLiteral* node) {
 }
 
 
-void AstConstructionVisitor::VisitCall(Call* node) {
-  increase_node_count();
-  add_flag(kDontSelfOptimize);
-}
-
-
-void AstConstructionVisitor::VisitCallNew(CallNew* node) {
-  increase_node_count();
-  add_flag(kDontSelfOptimize);
-}
-
-
 void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
   increase_node_count();
-  add_flag(kDontSelfOptimize);
   if (node->is_jsruntime()) {
     // Don't try to inline JS runtime calls because we don't (currently) even
     // optimize them.
index 511b814..98c80cd 100644 (file)
@@ -132,7 +132,7 @@ void FullCodeGenerator::Generate() {
   // We can optionally optimize based on counters rather than statistical
   // sampling.
   if (info->ShouldSelfOptimize()) {
-    if (FLAG_trace_opt) {
+    if (FLAG_trace_opt_verbose) {
       PrintF("[adding self-optimization header to %s]\n",
              *info->function()->debug_name()->ToCString());
     }
index cbfe64b..fd0f487 100644 (file)
@@ -150,7 +150,7 @@ void FullCodeGenerator::Generate() {
   // We can optionally optimize based on counters rather than statistical
   // sampling.
   if (info->ShouldSelfOptimize()) {
-    if (FLAG_trace_opt) {
+    if (FLAG_trace_opt_verbose) {
       PrintF("[adding self-optimization header to %s]\n",
              *info->function()->debug_name()->ToCString());
     }
index 89db84e..a582e90 100644 (file)
@@ -130,7 +130,7 @@ void FullCodeGenerator::Generate() {
   // We can optionally optimize based on counters rather than statistical
   // sampling.
   if (info->ShouldSelfOptimize()) {
-    if (FLAG_trace_opt) {
+    if (FLAG_trace_opt_verbose) {
       PrintF("[adding self-optimization header to %s]\n",
              *info->function()->debug_name()->ToCString());
     }