From ed573a8e08fadaa611d568294df14c0a96dc4a81 Mon Sep 17 00:00:00 2001
From: Jiewen Tan
Date: Thu, 26 Aug 2021 16:49:13 -0700
Subject: [PATCH] Enable test_api IMethodTest in OSS (#63345)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/63345

This diff did the following to enable the tests:
1. Exposed IMethod as TORCH_API.
2. Linked torch_deploy to test_api if USE_DEPLOY == 1.
3. Generated torch::deploy examples when building the torch_deploy library.

Test Plan: ./build/bin/test_api --gtest_filter=IMethodTest.*

Reviewed By: ngimel

Differential Revision: D30346257

Pulled By: alanwaketan

fbshipit-source-id: 932ae7d45790dfb6e00c51893933a054a0fad86d
---
 .jenkins/pytorch/test.sh               |  5 ++-
 test/cpp/api/CMakeLists.txt            |  8 +++++
 test/cpp/api/imethod.cpp               | 44 ++++++++++++++++----------
 torch/csrc/api/include/torch/imethod.h |  2 +-
 4 files changed, 40 insertions(+), 19 deletions(-)

diff --git a/.jenkins/pytorch/test.sh b/.jenkins/pytorch/test.sh
index daa0da7eec..4eb1b35253 100755
--- a/.jenkins/pytorch/test.sh
+++ b/.jenkins/pytorch/test.sh
@@ -253,6 +253,7 @@ test_libtorch() {
     ln -sf "$TORCH_LIB_DIR"/libbackend_with_compiler.so "$TORCH_BIN_DIR"
     ln -sf "$TORCH_LIB_DIR"/libjitbackend_test.so "$TORCH_BIN_DIR"
     ln -sf "$TORCH_LIB_DIR"/libc10* "$TORCH_BIN_DIR"
+    ln -sf "$TORCH_LIB_DIR"/libshm* "$TORCH_BIN_DIR"
     ln -sf "$TORCH_LIB_DIR"/libtorch* "$TORCH_BIN_DIR"
     ln -sf "$TORCH_LIB_DIR"/libtbb* "$TORCH_BIN_DIR"
@@ -275,7 +276,8 @@ test_libtorch() {
     python test/cpp/jit/tests_setup.py shutdown
     # Wait for background download to finish
     wait
-    OMP_NUM_THREADS=2 TORCH_CPP_TEST_MNIST_PATH="test/cpp/api/mnist" "$TORCH_BIN_DIR"/test_api --gtest_output=xml:$TEST_REPORTS_DIR/test_api.xml
+    # Exclude IMethodTest, which relies on torch::deploy and will instead be run in test_deploy.
+    OMP_NUM_THREADS=2 TORCH_CPP_TEST_MNIST_PATH="test/cpp/api/mnist" "$TORCH_BIN_DIR"/test_api --gtest_filter='-IMethodTest.*' --gtest_output=xml:$TEST_REPORTS_DIR/test_api.xml
     "$TORCH_BIN_DIR"/test_tensorexpr --gtest_output=xml:$TEST_REPORTS_DIR/test_tensorexpr.xml
     "$TORCH_BIN_DIR"/test_mobile_nnc --gtest_output=xml:$TEST_REPORTS_DIR/test_mobile_nnc.xml
     if [[ "${BUILD_ENVIRONMENT}" == pytorch-linux-xenial-py3* ]]; then
@@ -488,6 +490,7 @@ test_torch_deploy() {
   ln -sf "$TORCH_LIB_DIR"/libshm* "$TORCH_BIN_DIR"
   ln -sf "$TORCH_LIB_DIR"/libc10* "$TORCH_BIN_DIR"
   "$TORCH_BIN_DIR"/test_deploy
+  "$TORCH_BIN_DIR"/test_api --gtest_filter='IMethodTest.*'
   assert_git_not_dirty
 }
diff --git a/test/cpp/api/CMakeLists.txt b/test/cpp/api/CMakeLists.txt
index 9bd9d6780f..fc21afaef6 100644
--- a/test/cpp/api/CMakeLists.txt
+++ b/test/cpp/api/CMakeLists.txt
@@ -41,6 +41,10 @@ set(TORCH_API_TEST_SOURCES
   ${TORCH_API_TEST_DIR}/grad_mode.cpp
 )
+if(USE_DEPLOY)
+  list(APPEND TORCH_API_TEST_SOURCES ${TORCH_API_TEST_DIR}/imethod.cpp)
+endif()
+
 if(USE_CUDA)
   list(APPEND TORCH_API_TEST_SOURCES ${TORCH_API_TEST_DIR}/parallel.cpp)
 endif()
@@ -59,6 +63,10 @@ if(USE_CUDA)
   target_compile_definitions(test_api PRIVATE "USE_CUDA")
 endif()
+if(USE_DEPLOY)
+  target_link_libraries(test_api PRIVATE torch_deploy)
+endif()
+
 # Workaround for https://github.com/pytorch/pytorch/issues/40941
 if(USE_OPENMP AND CMAKE_COMPILER_IS_GNUCXX AND (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0.0))
   # Compiling transformer.cpp or pow_test.cpp with -O2+ and both -fuse-openmp and -faligned-newout any optimization
diff --git a/test/cpp/api/imethod.cpp b/test/cpp/api/imethod.cpp
index 3349d1b3a8..8673e55fb5 100644
--- a/test/cpp/api/imethod.cpp
+++ b/test/cpp/api/imethod.cpp
@@ -8,30 +8,40 @@
 using namespace ::testing;
 using namespace caffe2;

-// TODO(T96218435): Enable the following tests in OSS.
+const char* simple = "torch/csrc/deploy/example/generated/simple";
+const char* simpleJit = "torch/csrc/deploy/example/generated/simple_jit";
+
+// TODO(jwtan): Try unifying cmake and buck for getting the path.
+const char* path(const char* envname, const char* path) {
+  const char* env = getenv(envname);
+  return env ? env : path;
+}
+
+// Run `python torch/csrc/deploy/example/generate_examples.py` before running the following tests.
+// TODO(jwtan): Figure out a way to automate the above step for development. (CI has it already.)
 TEST(IMethodTest, CallMethod) {
-  auto script_model = torch::jit::load(getenv("SIMPLE_JIT"));
-  auto script_method = script_model.get_method("forward");
+  auto scriptModel = torch::jit::load(path("SIMPLE_JIT", simpleJit));
+  auto scriptMethod = scriptModel.get_method("forward");

   torch::deploy::InterpreterManager manager(3);
-  torch::deploy::Package p = manager.load_package(getenv("SIMPLE"));
-  auto py_model = p.load_pickle("model", "model.pkl");
-  torch::deploy::PythonMethodWrapper py_method(py_model, "forward");
+  torch::deploy::Package package = manager.load_package(path("SIMPLE", simple));
+  auto pyModel = package.load_pickle("model", "model.pkl");
+  torch::deploy::PythonMethodWrapper pyMethod(pyModel, "forward");

   auto input = torch::ones({10, 20});
-  auto output_py = py_method({input});
-  auto output_script = script_method({input});
-  EXPECT_TRUE(output_py.isTensor());
-  EXPECT_TRUE(output_script.isTensor());
-  auto output_py_tensor = output_py.toTensor();
-  auto output_script_tensor = output_script.toTensor();
-
-  EXPECT_TRUE(output_py_tensor.equal(output_script_tensor));
-  EXPECT_EQ(output_py_tensor.numel(), 200);
+  auto outputPy = pyMethod({input});
+  auto outputScript = scriptMethod({input});
+  EXPECT_TRUE(outputPy.isTensor());
+  EXPECT_TRUE(outputScript.isTensor());
+  auto outputPyTensor = outputPy.toTensor();
+  auto outputScriptTensor = outputScript.toTensor();
+
+  EXPECT_TRUE(outputPyTensor.equal(outputScriptTensor));
+  EXPECT_EQ(outputPyTensor.numel(), 200);
 }

 TEST(IMethodTest, GetArgumentNames) {
-  auto scriptModel = torch::jit::load(getenv("SIMPLE_JIT"));
+  auto scriptModel = torch::jit::load(path("SIMPLE_JIT", simpleJit));
   auto scriptMethod = scriptModel.get_method("forward");

   auto& scriptNames = scriptMethod.getArgumentNames();
@@ -39,7 +49,7 @@ TEST(IMethodTest, GetArgumentNames) {
   EXPECT_STREQ(scriptNames[0].c_str(), "input");

   torch::deploy::InterpreterManager manager(3);
-  torch::deploy::Package package = manager.load_package(getenv("SIMPLE"));
+  torch::deploy::Package package = manager.load_package(path("SIMPLE", simple));
   auto pyModel = package.load_pickle("model", "model.pkl");
   torch::deploy::PythonMethodWrapper pyMethod(pyModel, "forward");

diff --git a/torch/csrc/api/include/torch/imethod.h b/torch/csrc/api/include/torch/imethod.h
index dfabf50ce7..af010785a8 100644
--- a/torch/csrc/api/include/torch/imethod.h
+++ b/torch/csrc/api/include/torch/imethod.h
@@ -4,7 +4,7 @@
 namespace torch {

-class IMethod {
+class TORCH_API IMethod {
   /* IMethod provides a portable interface for torch methods, whether they
      are backed by torchscript or python/deploy.
--
2.34.1
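
For readers who have not used torch::IMethod before, below is a minimal sketch of the pattern the test above exercises: calling a TorchScript-backed method and a python/deploy-backed method through the same interface. It is not part of the patch. The callThroughIMethod helper, the include paths, and the assumption that torch::jit::Method implements torch::IMethod (which the test implies by calling getArgumentNames() on it) are illustrative; the torch::deploy calls mirror those in test/cpp/api/imethod.cpp. Building it requires a torch build with USE_DEPLOY=1 and the examples generated by generate_examples.py, as noted in the test file.

// Sketch only, not part of this patch; names and include paths are assumptions.
#include <iostream>
#include <torch/csrc/deploy/deploy.h>
#include <torch/imethod.h>
#include <torch/script.h>

// Hypothetical helper: accepts anything implementing the IMethod interface,
// whether it is backed by TorchScript or by python/torch::deploy.
c10::IValue callThroughIMethod(torch::IMethod& method, const at::Tensor& input) {
  // Assumes IMethod's call operator takes a vector of IValues,
  // matching the pyMethod({input}) / scriptMethod({input}) calls in the test.
  return method({input});
}

int main() {
  // TorchScript-backed method, using the generated "simple_jit" example.
  auto scriptModel =
      torch::jit::load("torch/csrc/deploy/example/generated/simple_jit");
  auto scriptMethod = scriptModel.get_method("forward");

  // python/deploy-backed method, using the generated "simple" package.
  torch::deploy::InterpreterManager manager(3);
  torch::deploy::Package package =
      manager.load_package("torch/csrc/deploy/example/generated/simple");
  auto pyModel = package.load_pickle("model", "model.pkl");
  torch::deploy::PythonMethodWrapper pyMethod(pyModel, "forward");

  // Both go through the same interface; the outputs should match,
  // which is what IMethodTest.CallMethod asserts.
  auto input = torch::ones({10, 20});
  auto scriptOut = callThroughIMethod(scriptMethod, input);
  auto pyOut = callThroughIMethod(pyMethod, input);
  std::cout << scriptOut.toTensor().equal(pyOut.toTensor()) << std::endl;
  return 0;
}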