[turbofan] Add SimplifiedOperator::Allocate operator.
author     mstarzinger <mstarzinger@chromium.org>
Mon, 4 May 2015 12:07:12 +0000 (05:07 -0700)
committer  Commit bot <commit-bot@chromium.org>
Mon, 4 May 2015 12:07:12 +0000 (12:07 +0000)
This introduces a simplified Allocate operator that can be used to model
inline allocations in TurboFan. It is currently used for with-context and
block-context allocations, but remains disabled behind the --turbo_allocate
flag because change lowering still introduces floating allocations outside
the effect chain that interfere with it.
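For orientation, here is a minimal sketch (not part of the patch) of what an
inline allocation looks like on the simplified operator level. It mirrors the
AllocationBuilder helper added to js-typed-lowering.cc below; the function
name and include set are illustrative assumptions, while the node-building
calls themselves are the ones used in this change.

// Sketch only -- not part of this patch. Assumes V8's internal compiler API
// at this revision; the function name is hypothetical.
#include "src/compiler/access-builder.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/simplified-operator.h"

namespace v8 {
namespace internal {
namespace compiler {

// Builds an inline allocation of {size} bytes, tags it with {map}, and
// threads the effect chain through the allocation and the initial store.
// Returns the allocation node; *effect is updated to the last store.
Node* BuildInlineAllocation(JSGraph* jsgraph,
                            SimplifiedOperatorBuilder* simplified, int size,
                            Handle<Map> map, Node** effect, Node* control) {
  Graph* graph = jsgraph->graph();
  // Allocate takes the size as its single value input; the allocation itself
  // becomes the new effect so later stores cannot float above it.
  Node* allocation = graph->NewNode(simplified->Allocate(),  // NOT_TENURED
                                    jsgraph->Constant(size), *effect, control);
  // Initial stores into the fresh object are chained off the allocation.
  *effect = graph->NewNode(simplified->StoreField(AccessBuilder::ForMap()),
                           allocation, jsgraph->Constant(map), allocation,
                           control);
  return allocation;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8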

R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/1109773002

Cr-Commit-Position: refs/heads/master@{#28195}

17 files changed:
src/compiler/access-builder.cc
src/compiler/access-builder.h
src/compiler/js-typed-lowering.cc
src/compiler/js-typed-lowering.h
src/compiler/linkage.cc
src/compiler/opcodes.h
src/compiler/simplified-lowering.cc
src/compiler/simplified-lowering.h
src/compiler/simplified-operator.cc
src/compiler/simplified-operator.h
src/compiler/typer.cc
src/compiler/verifier.cc
src/flag-definitions.h
test/cctest/compiler/test-simplified-lowering.cc
test/unittests/compiler/js-typed-lowering-unittest.cc
test/unittests/compiler/node-test-utils.cc
test/unittests/compiler/node-test-utils.h

index 1462c48..0b1769b 100644 (src/compiler/access-builder.cc)
@@ -45,6 +45,13 @@ FieldAccess AccessBuilder::ForJSArrayBufferBackingStore() {
 
 
 // static
+FieldAccess AccessBuilder::ForFixedArrayLength() {
+  return {kTaggedBase, FixedArray::kLengthOffset, MaybeHandle<Name>(),
+          Type::TaggedSigned(), kMachAnyTagged};
+}
+
+
+// static
 FieldAccess AccessBuilder::ForExternalArrayPointer() {
   return {kTaggedBase, ExternalArray::kExternalPointerOffset,
           MaybeHandle<Name>(), Type::UntaggedPointer(), kMachPtr};
index 4d28db1..3939f83 100644 (src/compiler/access-builder.h)
@@ -31,6 +31,9 @@ class AccessBuilder final : public AllStatic {
   // Provides access to JSArrayBuffer::backing_store() field.
   static FieldAccess ForJSArrayBufferBackingStore();
 
+  // Provides access to FixedArray::length() field.
+  static FieldAccess ForFixedArrayLength();
+
   // Provides access to ExternalArray::external_pointer() field.
   static FieldAccess ForExternalArrayPointer();
 
index 2fa39c4..51ddc17 100644 (src/compiler/js-typed-lowering.cc)
@@ -57,6 +57,61 @@ Reduction JSTypedLowering::ReplaceEagerly(Node* old, Node* node) {
 }
 
 
+// A helper class to construct inline allocations on the simplified operator
+// level. This keeps track of the effect chain for initial stores on a newly
+// allocated object and also provides helpers for commonly allocated objects.
+class AllocationBuilder final {
+ public:
+  AllocationBuilder(JSGraph* jsgraph, SimplifiedOperatorBuilder* simplified,
+                    Node* effect, Node* control)
+      : jsgraph_(jsgraph),
+        simplified_(simplified),
+        allocation_(nullptr),
+        effect_(effect),
+        control_(control) {}
+
+  // Primitive allocation of static size.
+  void Allocate(int size) {
+    allocation_ = graph()->NewNode(
+        simplified()->Allocate(), jsgraph()->Constant(size), effect_, control_);
+    effect_ = allocation_;
+  }
+
+  // Primitive store into a field.
+  void Store(const FieldAccess& access, Node* value) {
+    effect_ = graph()->NewNode(simplified()->StoreField(access), allocation_,
+                               value, effect_, control_);
+  }
+
+  // Compound allocation of a FixedArray.
+  void AllocateArray(int length, Handle<Map> map) {
+    Allocate(FixedArray::SizeFor(length));
+    Store(AccessBuilder::ForMap(), map);
+    Store(AccessBuilder::ForFixedArrayLength(), jsgraph()->Constant(length));
+  }
+
+  // Compound store of a constant into a field.
+  void Store(const FieldAccess& access, Handle<Object> value) {
+    Store(access, jsgraph()->Constant(value));
+  }
+
+  Node* allocation() const { return allocation_; }
+  Node* effect() const { return effect_; }
+
+ protected:
+  JSGraph* jsgraph() { return jsgraph_; }
+  Graph* graph() { return jsgraph_->graph(); }
+  SimplifiedOperatorBuilder* simplified() { return simplified_; }
+
+ private:
+  JSGraph* const jsgraph_;
+  SimplifiedOperatorBuilder* simplified_;
+  Node* allocation_;
+  Node* effect_;
+  Node* control_;
+};
+
+
 // A helper class to simplify the process of reducing a single binop node with a
 // JSOperator. This class manages the rewriting of context, control, and effect
 // dependencies during lowering of a binop and contains numerous helper
@@ -1019,6 +1074,79 @@ Reduction JSTypedLowering::ReduceJSCreateLiteralObject(Node* node) {
 }
 
 
+Reduction JSTypedLowering::ReduceJSCreateWithContext(Node* node) {
+  DCHECK_EQ(IrOpcode::kJSCreateWithContext, node->opcode());
+  Node* const input = NodeProperties::GetValueInput(node, 0);
+  Type* input_type = NodeProperties::GetBounds(input).upper;
+  if (FLAG_turbo_allocate && input_type->Is(Type::Receiver())) {
+    // JSCreateWithContext(o:receiver, f)
+    Node* const effect = NodeProperties::GetEffectInput(node);
+    Node* const control = NodeProperties::GetControlInput(node);
+    Node* const closure = NodeProperties::GetValueInput(node, 1);
+    Node* const context = NodeProperties::GetContextInput(node);
+    Node* const load = graph()->NewNode(
+        simplified()->LoadField(
+            AccessBuilder::ForContextSlot(Context::GLOBAL_OBJECT_INDEX)),
+        context, effect, control);
+    AllocationBuilder a(jsgraph(), simplified(), effect, control);
+    STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == 4);  // Ensure fully covered.
+    a.AllocateArray(Context::MIN_CONTEXT_SLOTS, factory()->with_context_map());
+    a.Store(AccessBuilder::ForContextSlot(Context::CLOSURE_INDEX), closure);
+    a.Store(AccessBuilder::ForContextSlot(Context::PREVIOUS_INDEX), context);
+    a.Store(AccessBuilder::ForContextSlot(Context::EXTENSION_INDEX), input);
+    a.Store(AccessBuilder::ForContextSlot(Context::GLOBAL_OBJECT_INDEX), load);
+    // TODO(mstarzinger): We could mutate {node} into the allocation instead.
+    NodeProperties::SetBounds(a.allocation(), NodeProperties::GetBounds(node));
+    NodeProperties::ReplaceWithValue(node, node, a.effect());
+    node->ReplaceInput(0, a.allocation());
+    node->ReplaceInput(1, a.effect());
+    node->set_op(common()->Finish(1));
+    node->TrimInputCount(2);
+    return Changed(node);
+  }
+  return NoChange();
+}
+
+
+Reduction JSTypedLowering::ReduceJSCreateBlockContext(Node* node) {
+  DCHECK_EQ(IrOpcode::kJSCreateBlockContext, node->opcode());
+  Node* const input = NodeProperties::GetValueInput(node, 0);
+  HeapObjectMatcher<ScopeInfo> minput(input);
+  DCHECK(minput.HasValue());  // TODO(mstarzinger): Make ScopeInfo static.
+  int context_length = minput.Value().handle()->ContextLength();
+  if (FLAG_turbo_allocate && context_length < kBlockContextAllocationLimit) {
+    // JSCreateBlockContext(s:scope[length < limit], f)
+    Node* const effect = NodeProperties::GetEffectInput(node);
+    Node* const control = NodeProperties::GetControlInput(node);
+    Node* const closure = NodeProperties::GetValueInput(node, 1);
+    Node* const context = NodeProperties::GetContextInput(node);
+    Node* const load = graph()->NewNode(
+        simplified()->LoadField(
+            AccessBuilder::ForContextSlot(Context::GLOBAL_OBJECT_INDEX)),
+        context, effect, control);
+    AllocationBuilder a(jsgraph(), simplified(), effect, control);
+    STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == 4);  // Ensure fully covered.
+    a.AllocateArray(context_length, factory()->block_context_map());
+    a.Store(AccessBuilder::ForContextSlot(Context::CLOSURE_INDEX), closure);
+    a.Store(AccessBuilder::ForContextSlot(Context::PREVIOUS_INDEX), context);
+    a.Store(AccessBuilder::ForContextSlot(Context::EXTENSION_INDEX), input);
+    a.Store(AccessBuilder::ForContextSlot(Context::GLOBAL_OBJECT_INDEX), load);
+    for (int i = Context::MIN_CONTEXT_SLOTS; i < context_length; ++i) {
+      a.Store(AccessBuilder::ForContextSlot(i), jsgraph()->TheHoleConstant());
+    }
+    // TODO(mstarzinger): We could mutate {node} into the allocation instead.
+    NodeProperties::SetBounds(a.allocation(), NodeProperties::GetBounds(node));
+    NodeProperties::ReplaceWithValue(node, node, a.effect());
+    node->ReplaceInput(0, a.allocation());
+    node->ReplaceInput(1, a.effect());
+    node->set_op(common()->Finish(1));
+    node->TrimInputCount(2);
+    return Changed(node);
+  }
+  return NoChange();
+}
+
+
 Reduction JSTypedLowering::Reduce(Node* node) {
   // Check if the output type is a singleton.  In that case we already know the
   // result value and can simply replace the node if it's eliminable.
@@ -1111,6 +1239,10 @@ Reduction JSTypedLowering::Reduce(Node* node) {
       return ReduceJSCreateLiteralArray(node);
     case IrOpcode::kJSCreateLiteralObject:
       return ReduceJSCreateLiteralObject(node);
+    case IrOpcode::kJSCreateWithContext:
+      return ReduceJSCreateWithContext(node);
+    case IrOpcode::kJSCreateBlockContext:
+      return ReduceJSCreateBlockContext(node);
     default:
       break;
   }
index 855de8a..8000a93 100644 (src/compiler/js-typed-lowering.h)
@@ -57,6 +57,8 @@ class JSTypedLowering final : public Reducer {
   Reduction ReduceJSCreateClosure(Node* node);
   Reduction ReduceJSCreateLiteralArray(Node* node);
   Reduction ReduceJSCreateLiteralObject(Node* node);
+  Reduction ReduceJSCreateWithContext(Node* node);
+  Reduction ReduceJSCreateBlockContext(Node* node);
   Reduction ReduceNumberBinop(Node* node, const Operator* numberOp);
   Reduction ReduceInt32Binop(Node* node, const Operator* intOp);
   Reduction ReduceUI32Shift(Node* node, Signedness left_signedness,
@@ -77,6 +79,9 @@ class JSTypedLowering final : public Reducer {
   SimplifiedOperatorBuilder* simplified() { return &simplified_; }
   MachineOperatorBuilder* machine() const;
 
+  // Limit up to which context allocations are inlined.
+  static const int kBlockContextAllocationLimit = 16;
+
   JSGraph* jsgraph_;
   SimplifiedOperatorBuilder simplified_;
   ZoneVector<Node*> conversions_;  // Cache inserted JSToXXX() conversions.
index 0288f1a..08fef67 100644 (src/compiler/linkage.cc)
@@ -110,6 +110,7 @@ bool Linkage::NeedsFrameState(Runtime::FunctionId function) {
   // not to call into arbitrary JavaScript, not to throw, and not to deoptimize
 // are whitelisted here and can be called without a FrameState.
   switch (function) {
+    case Runtime::kAllocateInTargetSpace:
     case Runtime::kDefineClassMethod:              // TODO(jarin): Is it safe?
     case Runtime::kDefineGetterPropertyUnchecked:  // TODO(jarin): Is it safe?
     case Runtime::kDefineSetterPropertyUnchecked:  // TODO(jarin): Is it safe?
index e5b978a..5609970 100644 (src/compiler/opcodes.h)
   V(ChangeFloat64ToTagged)         \
   V(ChangeBoolToBit)               \
   V(ChangeBitToBool)               \
+  V(Allocate)                      \
   V(LoadField)                     \
   V(LoadBuffer)                    \
   V(LoadElement)                   \
index d1ab441..c0dda8c 100644 (src/compiler/simplified-lowering.cc)
@@ -783,6 +783,13 @@ class RepresentationSelector {
         if (lower()) lowering->DoStringAdd(node);
         break;
       }
+      case IrOpcode::kAllocate: {
+        ProcessInput(node, 0, kMachAnyTagged);
+        ProcessRemainingInputs(node, 1);
+        SetOutput(node, kMachAnyTagged);
+        if (lower()) lowering->DoAllocate(node);
+        break;
+      }
       case IrOpcode::kLoadField: {
         FieldAccess access = FieldAccessOf(node->op());
         ProcessInput(node, 0, changer_->TypeForBasePointer(access));
@@ -1159,6 +1166,23 @@ WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged,
 }  // namespace
 
 
+void SimplifiedLowering::DoAllocate(Node* node) {
+  PretenureFlag pretenure = OpParameter<PretenureFlag>(node->op());
+  AllocationSpace space = pretenure == TENURED ? OLD_SPACE : NEW_SPACE;
+  Runtime::FunctionId f = Runtime::kAllocateInTargetSpace;
+  Operator::Properties props = node->op()->properties();
+  CallDescriptor* desc = Linkage::GetRuntimeCallDescriptor(zone(), f, 2, props);
+  node->set_op(common()->Call(desc));
+  ExternalReference ref(f, jsgraph()->isolate());
+  int32_t flags = AllocateTargetSpace::encode(space);
+  node->InsertInput(graph()->zone(), 0, jsgraph()->CEntryStubConstant(1));
+  node->InsertInput(graph()->zone(), 2, jsgraph()->SmiConstant(flags));
+  node->InsertInput(graph()->zone(), 3, jsgraph()->ExternalConstant(ref));
+  node->InsertInput(graph()->zone(), 4, jsgraph()->Int32Constant(2));
+  node->InsertInput(graph()->zone(), 5, jsgraph()->NoContextConstant());
+}
+
+
 void SimplifiedLowering::DoLoadField(Node* node) {
   const FieldAccess& access = FieldAccessOf(node->op());
   node->set_op(machine()->Load(access.machine_type));
@@ -1303,7 +1327,6 @@ void SimplifiedLowering::DoStringAdd(Node* node) {
 
 
 Node* SimplifiedLowering::StringComparison(Node* node, bool requires_ordering) {
-  CEntryStub stub(jsgraph()->isolate(), 1);
   Runtime::FunctionId f =
       requires_ordering ? Runtime::kStringCompareRT : Runtime::kStringEquals;
   ExternalReference ref(f, jsgraph()->isolate());
@@ -1312,12 +1335,12 @@ Node* SimplifiedLowering::StringComparison(Node* node, bool requires_ordering) {
   // interface descriptor is available for it.
   CallDescriptor* desc = Linkage::GetRuntimeCallDescriptor(zone(), f, 2, props);
   return graph()->NewNode(common()->Call(desc),
-                          jsgraph()->HeapConstant(stub.GetCode()),
+                          jsgraph()->CEntryStubConstant(1),
                           NodeProperties::GetValueInput(node, 0),
                           NodeProperties::GetValueInput(node, 1),
                           jsgraph()->ExternalConstant(ref),
                           jsgraph()->Int32Constant(2),
-                          jsgraph()->UndefinedConstant());
+                          jsgraph()->NoContextConstant());
 }
 
 
index 9ffdde5..124090e 100644 (src/compiler/simplified-lowering.h)
@@ -28,6 +28,7 @@ class SimplifiedLowering final {
   void LowerAllNodes();
 
   // TODO(titzer): These are exposed for direct testing. Use a friend class.
+  void DoAllocate(Node* node);
   void DoLoadField(Node* node);
   void DoStoreField(Node* node);
   // TODO(turbofan): The output_type can be removed once the result of the
index 17a813e..9b34668 100644 (src/compiler/simplified-operator.cc)
@@ -242,6 +242,13 @@ const Operator* SimplifiedOperatorBuilder::ReferenceEqual(Type* type) {
 }
 
 
+const Operator* SimplifiedOperatorBuilder::Allocate(PretenureFlag pretenure) {
+  return new (zone())
+      Operator1<PretenureFlag>(IrOpcode::kAllocate, Operator::kNoThrow,
+                               "Allocate", 1, 1, 1, 1, 1, 0, pretenure);
+}
+
+
 const Operator* SimplifiedOperatorBuilder::LoadBuffer(BufferAccess access) {
   switch (access.external_array_type()) {
 #define LOAD_BUFFER(Type, type, TYPE, ctype, size) \
index 08cd0ec..484b39b 100644 (src/compiler/simplified-operator.h)
@@ -163,6 +163,8 @@ class SimplifiedOperatorBuilder final {
   const Operator* ObjectIsSmi();
   const Operator* ObjectIsNonNegativeSmi();
 
+  const Operator* Allocate(PretenureFlag pretenure = NOT_TENURED);
+
   const Operator* LoadField(FieldAccess const&);
   const Operator* StoreField(FieldAccess const&);
 
index 6fd609f..7f176fe 100644 (src/compiler/typer.cc)
@@ -1784,6 +1784,11 @@ Bounds Typer::Visitor::TypeChangeBitToBool(Node* node) {
 }
 
 
+Bounds Typer::Visitor::TypeAllocate(Node* node) {
+  return Bounds(Type::TaggedPointer());
+}
+
+
 Bounds Typer::Visitor::TypeLoadField(Node* node) {
   return Bounds(FieldAccessOf(node->op()).type);
 }
index 238de2f..b86bba9 100644 (src/compiler/verifier.cc)
@@ -635,6 +635,10 @@ void Verifier::Visitor::Check(Node* node) {
       CheckValueInputIs(node, 0, Type::Any());
       CheckUpperIs(node, Type::Boolean());
       break;
+    case IrOpcode::kAllocate:
+      CheckValueInputIs(node, 0, Type::PlainNumber());
+      CheckUpperIs(node, Type::TaggedPointer());
+      break;
 
     case IrOpcode::kChangeTaggedToInt32: {
       // Signed32 /\ Tagged -> Signed32 /\ UntaggedInt32
index 62fd198..25cb5f2 100644 (src/flag-definitions.h)
@@ -402,6 +402,7 @@ DEFINE_BOOL(turbo_stats, false, "print TurboFan statistics")
 DEFINE_BOOL(turbo_splitting, true, "split nodes during scheduling in TurboFan")
 DEFINE_BOOL(turbo_types, true, "use typed lowering in TurboFan")
 DEFINE_BOOL(turbo_type_feedback, false, "use type feedback in TurboFan")
+DEFINE_BOOL(turbo_allocate, false, "enable inline allocations in TurboFan")
 DEFINE_BOOL(turbo_source_positions, false,
             "track source code positions when building TurboFan IR")
 DEFINE_IMPLICATION(trace_turbo, turbo_source_positions)
index d1c8775..eb730cf 100644 (test/cctest/compiler/test-simplified-lowering.cc)
@@ -22,6 +22,7 @@
 #include "src/scopes.h"
 #include "test/cctest/cctest.h"
 #include "test/cctest/compiler/codegen-tester.h"
+#include "test/cctest/compiler/function-tester.h"
 #include "test/cctest/compiler/graph-builder-tester.h"
 #include "test/cctest/compiler/value-helper.h"
 
@@ -77,6 +78,17 @@ class SimplifiedLoweringTester : public GraphBuilderTester<ReturnType> {
     CHECK(factory()->NewNumber(expected)->SameValue(result));
   }
 
+  template <typename T>
+  T* CallWithPotentialGC() {
+    // TODO(titzer): we wrap the code in a JSFunction here to reuse the
+    // JSEntryStub; that could be done with a special prologue or other stub.
+    Handle<JSFunction> fun = FunctionTester::ForMachineGraph(this->graph());
+    Handle<Object>* args = NULL;
+    MaybeHandle<Object> result = Execution::Call(
+        this->isolate(), fun, factory()->undefined_value(), 0, args, false);
+    return T::cast(*result.ToHandleChecked());
+  }
+
   Factory* factory() { return this->isolate()->factory(); }
   Heap* heap() { return this->isolate()->heap(); }
 };
@@ -650,6 +662,31 @@ TEST(RunAccessTests_Smi) {
 }
 
 
+TEST(RunAllocate) {
+  PretenureFlag flag[] = {NOT_TENURED, TENURED};
+
+  for (size_t i = 0; i < arraysize(flag); i++) {
+    SimplifiedLoweringTester<HeapObject*> t;
+    FieldAccess access = AccessBuilder::ForMap();
+    Node* size = t.jsgraph.Constant(HeapNumber::kSize);
+    Node* alloc = t.NewNode(t.simplified()->Allocate(flag[i]), size);
+    Node* map = t.jsgraph.Constant(t.factory()->heap_number_map());
+    t.StoreField(access, alloc, map);
+    t.Return(alloc);
+
+    t.LowerAllNodes();
+    t.GenerateCode();
+
+    if (Pipeline::SupportedTarget()) {
+      HeapObject* result = t.CallWithPotentialGC<HeapObject>();
+      CHECK(t.heap()->new_space()->Contains(result) || flag[i] == TENURED);
+      CHECK(t.heap()->old_space()->Contains(result) || flag[i] == NOT_TENURED);
+      CHECK(result->IsHeapNumber());
+    }
+  }
+}
+
+
 // Fills in most of the nodes of the graph in order to make tests shorter.
 class TestingGraph : public HandleAndZoneScope, public GraphAndBuilders {
  public:
index f4b5a3a..9e7b1ee 100644 (test/unittests/compiler/js-typed-lowering-unittest.cc)
@@ -967,6 +967,30 @@ TEST_F(JSTypedLoweringTest, JSCreateLiteralObject) {
 }
 #endif
 
+
+// -----------------------------------------------------------------------------
+// JSCreateWithContext
+
+
+TEST_F(JSTypedLoweringTest, JSCreateWithContext) {
+  FLAG_turbo_allocate = true;
+  Node* const object = Parameter(Type::Receiver());
+  Node* const closure = Parameter(Type::Any());
+  Node* const context = Parameter(Type::Any());
+  Node* const frame_state = EmptyFrameState();
+  Node* const effect = graph()->start();
+  Node* const control = graph()->start();
+  Reduction r =
+      Reduce(graph()->NewNode(javascript()->CreateWithContext(), object,
+                              closure, context, frame_state, effect, control));
+  ASSERT_TRUE(r.Changed());
+  EXPECT_THAT(r.replacement(),
+              IsFinish(IsAllocate(IsNumberConstant(Context::SizeFor(
+                                      Context::MIN_CONTEXT_SLOTS)),
+                                  effect, control),
+                       _));
+}
+
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
index 18d3d4b..44ec06a 100644 (test/unittests/compiler/node-test-utils.cc)
@@ -659,6 +659,33 @@ class IsCallMatcher final : public NodeMatcher {
 };
 
 
+class IsAllocateMatcher final : public NodeMatcher {
+ public:
+  IsAllocateMatcher(const Matcher<Node*>& size_matcher,
+                    const Matcher<Node*>& effect_matcher,
+                    const Matcher<Node*>& control_matcher)
+      : NodeMatcher(IrOpcode::kAllocate),
+        size_matcher_(size_matcher),
+        effect_matcher_(effect_matcher),
+        control_matcher_(control_matcher) {}
+
+  bool MatchAndExplain(Node* node, MatchResultListener* listener) const final {
+    return (NodeMatcher::MatchAndExplain(node, listener) &&
+            PrintMatchAndExplain(NodeProperties::GetValueInput(node, 0), "size",
+                                 size_matcher_, listener) &&
+            PrintMatchAndExplain(NodeProperties::GetEffectInput(node), "effect",
+                                 effect_matcher_, listener) &&
+            PrintMatchAndExplain(NodeProperties::GetControlInput(node),
+                                 "control", control_matcher_, listener));
+  }
+
+ private:
+  const Matcher<Node*> size_matcher_;
+  const Matcher<Node*> effect_matcher_;
+  const Matcher<Node*> control_matcher_;
+};
+
+
 class IsLoadFieldMatcher final : public NodeMatcher {
  public:
   IsLoadFieldMatcher(const Matcher<FieldAccess>& access_matcher,
@@ -1470,6 +1497,14 @@ Matcher<Node*> IsCall(
 }
 
 
+Matcher<Node*> IsAllocate(const Matcher<Node*>& size_matcher,
+                          const Matcher<Node*>& effect_matcher,
+                          const Matcher<Node*>& control_matcher) {
+  return MakeMatcher(
+      new IsAllocateMatcher(size_matcher, effect_matcher, control_matcher));
+}
+
+
 Matcher<Node*> IsLoadField(const Matcher<FieldAccess>& access_matcher,
                            const Matcher<Node*>& base_matcher,
                            const Matcher<Node*>& effect_matcher,
index 1fb19a1..085df3e 100644 (test/unittests/compiler/node-test-utils.h)
@@ -127,6 +127,9 @@ Matcher<Node*> IsNumberSubtract(const Matcher<Node*>& lhs_matcher,
                                 const Matcher<Node*>& rhs_matcher);
 Matcher<Node*> IsNumberMultiply(const Matcher<Node*>& lhs_matcher,
                                 const Matcher<Node*>& rhs_matcher);
+Matcher<Node*> IsAllocate(const Matcher<Node*>& size_matcher,
+                          const Matcher<Node*>& effect_matcher,
+                          const Matcher<Node*>& control_matcher);
 Matcher<Node*> IsLoadField(const Matcher<FieldAccess>& access_matcher,
                            const Matcher<Node*>& base_matcher,
                            const Matcher<Node*>& effect_matcher,