From b6544ef81511cc527e4945a969d8a680d2d5069f Mon Sep 17 00:00:00 2001
From: Scott Wolchok
Date: Wed, 8 Sep 2021 18:30:14 -0700
Subject: [PATCH] [PyTorch] Fix MobileDebugInfo vector copy (#64030)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/64030

ghstack-source-id: 137566816

Test Plan:
Pixel 3 before: https://our.intern.facebook.com/intern/aibench/details/320277034999340
Pixel 3 after: https://our.intern.facebook.com/intern/aibench/details/724509739115867

You can see the vector copy disappear in the flame graph. Overall mean decreased from 354 ms to 348 ms (though I'm not sure whether that is outside the usual noise).

Reviewed By: raziel

Differential Revision: D30559032

fbshipit-source-id: 6d8bb5396d3449cc63023ee7acf694b5d146ddc1
---
 torch/csrc/jit/mobile/debug_info.cpp | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/torch/csrc/jit/mobile/debug_info.cpp b/torch/csrc/jit/mobile/debug_info.cpp
index a75ffe1..50bfd8a 100644
--- a/torch/csrc/jit/mobile/debug_info.cpp
+++ b/torch/csrc/jit/mobile/debug_info.cpp
@@ -122,13 +122,14 @@ MobileDebugTable::MobileDebugTable(
   size_t debug_size{0};
   std::tie(debug_data, debug_size) = reader->getRecord(record_name);
   auto ivalues =
-      jit::unpickle(
-          reinterpret_cast<const char*>(debug_data.get()), debug_size)
-          .toTuple()
-          ->elements();
+      std::move(
+          *jit::unpickle(
+               reinterpret_cast<const char*>(debug_data.get()), debug_size)
+               .toTuple())
+          .elements();
   SourceRangeDeserializer deserializer;
   for (auto& val : ivalues) {
-    auto tup_elems = val.toTuple()->elements();
+    auto tup_elems = std::move(*std::move(val).toTuple()).elements();
     // For BC we decode only tuples with 3 elements
     // assuming it contains
     // byte_offset, debug_handle (=source range tag), source range
-- 
2.7.4
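
Note: the sketch below is a minimal, standalone illustration of the pattern this patch relies on. It uses a simplified `Tuple` stand-in (not the real `c10::ivalue::Tuple` API) with ref-qualified `elements()` overloads, which is the assumption that makes `std::move(*tuple).elements()` steal the stored vector instead of copying it, as `tuple->elements()` on an lvalue would.

```cpp
// Minimal sketch (hypothetical Tuple type, not c10::ivalue::Tuple) of why
// std::move(*tup).elements() avoids the copy that tup->elements() makes
// when elements() is overloaded on the object's value category.
#include <iostream>
#include <memory>
#include <string>
#include <utility>
#include <vector>

struct Tuple {
  std::vector<std::string> elems_;

  // Chosen for lvalues: returns a reference; storing it in `auto` copies.
  const std::vector<std::string>& elements() const& { return elems_; }

  // Chosen for rvalues: hands the vector out by move, so no copy is made.
  std::vector<std::string>&& elements() && { return std::move(elems_); }
};

int main() {
  auto tup = std::make_shared<Tuple>(
      Tuple{{"byte_offset", "debug_handle", "source_range"}});

  // Before the patch's pattern: `auto copied = tup->elements();` copies the
  // whole vector. After: dereference and move the pointee so overload
  // resolution picks the rvalue-qualified elements().
  auto moved = std::move(*tup).elements();

  std::cout << "moved size: " << moved.size() << "\n";          // 3
  std::cout << "left behind: " << tup->elems_.size() << "\n";   // typically 0 after the move
  return 0;
}
```

The same idea appears twice in the diff: once for the top-level tuple returned by `jit::unpickle(...)` and once per element inside the loop (`std::move(*std::move(val).toTuple()).elements()`), so neither site pays for a `std::vector<IValue>` copy.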