[PyTorch] Copy vectors less in Function::append_operator (#63977)
author Scott Wolchok <swolchok@fb.com>
Thu, 9 Sep 2021 01:30:14 +0000 (18:30 -0700)
committer Facebook GitHub Bot <facebook-github-bot@users.noreply.github.com>
Thu, 9 Sep 2021 01:31:38 +0000 (18:31 -0700)
Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/63977

Doesn't seem to be any reason to copy these argument vectors.
ghstack-source-id: 137566815

Test Plan: CI

Reviewed By: dhruvbird, raziel

Differential Revision: D30550301

fbshipit-source-id: 33c199f975e4fb62c50a8210dc08aa9bb7a3e2f2

torch/csrc/jit/mobile/function.cpp

index fad8c39..434bb8d 100644 (file)
@@ -65,16 +65,16 @@ bool Function::append_operator(
   }
 
   auto jit_op = findOperatorFor(opname);
-  std::vector<c10::Argument> args;
+  const std::vector<c10::Argument>* pArgs = nullptr;
   if (jit_op) {
     fn = [jit_op](Stack& stack) { jit_op->getOperation()(stack); };
-    args = jit_op->schema().arguments();
+    pArgs = &jit_op->schema().arguments();
   } else {
     auto op = c10::Dispatcher::singleton().findSchema(opname_c10);
     if (op.has_value()) {
       fn = [op](Stack& stack) { op->callBoxed(&stack); };
       if (op->hasSchema()) {
-        args = op->schema().arguments();
+        pArgs = &op->schema().arguments();
       } else {
         TORCH_CHECK(false, "arguments are missing for operator ", opname);
       }
@@ -83,6 +83,8 @@ bool Function::append_operator(
     }
   }
 
+  TORCH_INTERNAL_ASSERT_DEBUG_ONLY(pArgs);
+  const auto& args = *pArgs;
   if (model_version == 0x3LL &&
       opname == c10::OperatorName("aten::_convolution", "")) {
     // Since byte-code versions 0x4L, convolution has an additional