Add IDeserializer support in pyarmnn
author wangg <gemini910621@gmail.com>
Wed, 26 Aug 2020 01:44:32 +0000 (01:44 +0000)
committer Jim Flynn <jim.flynn@arm.com>
Wed, 21 Oct 2020 12:16:32 +0000 (12:16 +0000)
Resources required for new unit tests are included for review.

Signed-off-by: Guanqun Wang <gemini910621@gmail.com>
Change-Id: Iead6cb5beaf824a6f467ad9da4aede5719ebe4ec
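
For reviewers, a minimal sketch of the intended Python-side usage; the names and call flow are taken from the new test_deserializer.py below, and the model path is illustrative:

    import pyarmnn as ann

    # Parse a serialized .armnn model into an INetwork.
    parser = ann.IDeserializer()
    network = parser.CreateNetworkFromBinary('mock_model.armnn')

    # Binding info is returned as a (binding id, TensorInfo) tuple, like the other parsers.
    # The layer id argument is unused in the current implementation, so 0 is acceptable.
    input_binding_info = parser.GetNetworkInputBindingInfo(0, 'input_1')
    output_binding_info = parser.GetNetworkOutputBindingInfo(0, 'dense/Softmax')

    # From here the usual pyarmnn flow applies: Optimize, LoadNetwork, EnqueueWorkload.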

CMakeLists.txt
python/pyarmnn/setup.py
python/pyarmnn/src/pyarmnn/__init__.py
python/pyarmnn/src/pyarmnn/swig/armnn_deserializer.i [new file with mode: 0644]
python/pyarmnn/swig_generate.py
python/pyarmnn/test/test_deserializer.py [new file with mode: 0644]
python/pyarmnn/test/test_generated.py
python/pyarmnn/test/testdata/shared/deserializer/golden_output_lite.npy [new file with mode: 0644]
python/pyarmnn/test/testdata/shared/deserializer/input_lite.npy [new file with mode: 0644]
python/pyarmnn/test/testdata/shared/mock_model.armnn [new file with mode: 0644]
src/armnnDeserializer/CMakeLists.txt [new file with mode: 0755]

index cf12a57..06a9bb8 100644 (file)
@@ -27,6 +27,7 @@ include(GNUInstallDirs)
 add_subdirectory(samples)
 add_subdirectory(src/armnnTfLiteParser)
 add_subdirectory(src/armnnSerializer)
+add_subdirectory(src/armnnDeserializer)
 
 if (BUILD_TESTS)
     add_subdirectory(tests)
index e1a4ea4..94dd63e 100755 (executable)
@@ -285,6 +285,7 @@ if __name__ == '__main__':
     add_parsers_ext('OnnxParser', extensions_to_build)
     add_parsers_ext('TfParser', extensions_to_build)
     add_parsers_ext('TfLiteParser', extensions_to_build)
+    add_parsers_ext('Deserializer', extensions_to_build)
 
     setup(
         name='pyarmnn',
index b1aa81f..19b14a4 100644 (file)
@@ -59,6 +59,18 @@ except ImportError as err:
         """In case people try importing without having Arm NN built with this parser."""
         raise RuntimeError(message)
 
+try:
+    from ._generated.pyarmnn_deserializer import IDeserializer
+except ImportError as err:
+    logger = logging.getLogger(__name__)
+    message = "Your ArmNN library instance does not have an armnn models parser funcionality. "
+    logger.warning("%s Skipped IDeserializer import.", message)
+    logger.debug(str(err))
+
+    def IDeserializer():
+        """In case people try importing without having ArmNN built with this parser."""
+        raise RuntimeError(message)
+
 # Network
 from ._generated.pyarmnn import Optimize, OptimizerOptions, IOptimizedNetwork, IInputSlot, \
     IOutputSlot, IConnectableLayer, INetwork
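
As with the other optional parser modules, the import above is guarded: if the installed ArmNN build does not include the deserializer extension, pyarmnn still imports cleanly and the stub only raises when it is actually used. A small sketch of that behaviour, assuming a build without the deserializer:

    import pyarmnn as ann

    try:
        parser = ann.IDeserializer()
    except RuntimeError as err:
        # Raised by the stub defined above when ArmNN was built without the deserializer.
        print('IDeserializer not available:', err)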
diff --git a/python/pyarmnn/src/pyarmnn/swig/armnn_deserializer.i b/python/pyarmnn/src/pyarmnn/swig/armnn_deserializer.i
new file mode 100644 (file)
index 0000000..bc8228a
--- /dev/null
@@ -0,0 +1,120 @@
+//
+// Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+%module pyarmnn_deserializer
+%{
+#include "armnnDeserializer/IDeserializer.hpp"
+#include "armnn/Types.hpp"
+#include "armnn/INetwork.hpp"
+#include "armnn/Exceptions.hpp"
+#include <string>
+#include <fstream>
+#include <sstream>
+%}
+
+//typemap definitions and other common stuff
+%include "standard_header.i"
+
+namespace std {
+    %template(BindingPointInfo) pair<int, armnn::TensorInfo>;
+    %template(MapStringTensorShape) map<std::string, armnn::TensorShape>;
+    %template(StringVector)         vector<string>;
+}
+
+namespace armnnDeserializer
+{
+%feature("docstring",
+"
+Interface for creating a parser object for serialized ArmNN (.armnn) model files.
+
+Parsers are used to automatically construct ArmNN graphs from model files.
+
+") IDeserializer;
+%nodefaultctor IDeserializer;
+class IDeserializer
+{
+public:
+};
+
+%extend IDeserializer {
+// This is not a substitute for the default constructor of the C++ class. It tells SWIG to create a custom __init__
+// method for the ArmNN Python object that uses the static factory method to do the job.
+
+    IDeserializer() {
+        return armnnDeserializer::IDeserializer::CreateRaw();
+    }
+
+// The following does not replace the real destructor of the C++ class.
+// It creates a function that is called when the SWIG object goes out of scope to clean up resources,
+// so the user does not need to call IDeserializer::Destroy themselves.
+// `$self` is a pointer to the extracted ArmNN IDeserializer object.
+
+    ~IDeserializer() {
+        armnnDeserializer::IDeserializer::Destroy($self);
+    }
+
+    %feature("docstring",
+    "
+    Create the network from an armnn binary file.
+
+    Args:
+        graphFile (str): Path to the armnn model to be parsed.
+
+    Returns:
+        INetwork: Parsed network.
+
+    Raises:
+        RuntimeError: If model file was not found.
+    ") CreateNetworkFromBinaryFile;
+
+    %newobject CreateNetworkFromBinary;
+    armnn::INetwork* CreateNetworkFromBinary(const char *graphFile) {
+        std::ifstream is(graphFile, std::ifstream::binary);
+        if (!is.good()) {
+            std::string locationString = CHECK_LOCATION().AsString();
+            std::stringstream msg;
+            msg << "Cannot read the file " << graphFile << locationString;
+            throw armnn::FileNotFoundException(msg.str());
+        }
+        return $self->CreateNetworkFromBinary(is).release();
+    }
+
+// Make both GetNetworkInputBindingInfo and GetNetworkOutputBindingInfo return a std::pair, as the other parsers do, instead of a struct.
+
+    %feature("docstring",
+        "
+        Retrieve binding info (layer id and tensor info) for the network input identified by the given layer id and name.
+
+        Args:
+            layerId (int): The layer id. Any value is acceptable since it is unused in the current implementation.
+            name (str): Name of the input.
+
+        Returns:
+            tuple: (`int`, `TensorInfo`).
+        ") GetNetworkInputBindingInfo;
+    std::pair<int, armnn::TensorInfo> GetNetworkInputBindingInfo(unsigned int layerId, const std::string& name){
+        armnnDeserializer::BindingPointInfo info = $self->GetNetworkInputBindingInfo(layerId, name);
+        return std::make_pair(info.m_BindingId, info.m_TensorInfo);
+    }
+
+    %feature("docstring",
+        "
+        Retrieve binding info (layer id and `TensorInfo`) for the network output identified by the given layer id and name.
+
+        Args:
+            layerId (int): The layer id. Any value is acceptable since it is unused in the current implementation.
+            name (str): Name of the output.
+
+        Returns:
+            tuple: (`int`, `TensorInfo`).
+        ") GetNetworkOutputBindingInfo;
+    std::pair<int, armnn::TensorInfo> GetNetworkOutputBindingInfo(unsigned int layerId, const std::string& name){
+        armnnDeserializer::BindingPointInfo info = $self->GetNetworkOutputBindingInfo(layerId, name);
+        return std::make_pair(info.m_BindingId, info.m_TensorInfo);
+    }
+}
+
+} // end of namespace armnnDeserializer
+
+// Clear exception typemap.
+%exception;
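
Because the binding-info wrappers above return a std::pair, Python callers receive a plain tuple that can be unpacked or passed directly to the pyarmnn tensor helpers; a brief sketch, where input_data stands for a suitably shaped NumPy array:

    # (binding id, TensorInfo) tuple from the wrapper above.
    input_binding_info = parser.GetNetworkInputBindingInfo(0, 'input_1')
    input_tensors = ann.make_input_tensors([input_binding_info], [input_data])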
index 7a59eda..72bccbf 100755 (executable)
@@ -105,12 +105,13 @@ if __name__ == "__main__":
 
     __verbose = args.verbose
 
-    wrap_names = ['armnn_version', 
-        'armnn', 
-        'armnn_caffeparser', 
-        'armnn_onnxparser', 
-        'armnn_tfparser', 
-        'armnn_tfliteparser']
+    wrap_names = ['armnn_version',
+        'armnn',
+        'armnn_caffeparser',
+        'armnn_onnxparser',
+        'armnn_tfparser',
+        'armnn_tfliteparser',
+        'armnn_deserializer']
 
     for n in wrap_names:
         generate_wrap(n, f"-I{' -I'.join(armnn_includes)} ")
diff --git a/python/pyarmnn/test/test_deserializer.py b/python/pyarmnn/test/test_deserializer.py
new file mode 100644 (file)
index 0000000..05aa733
--- /dev/null
@@ -0,0 +1,120 @@
+# Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
+# SPDX-License-Identifier: MIT
+import os
+
+import pytest
+import pyarmnn as ann
+import numpy as np
+
+
+@pytest.fixture()
+def parser(shared_data_folder):
+    """
+    Parse and setup the test network to be used for the tests below
+    """
+    parser = ann.IDeserializer()
+    parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn'))
+
+    yield parser
+
+
+def test_deserializer_swig_destroy():
+    assert ann.IDeserializer.__swig_destroy__, "There is a swig python destructor defined"
+    assert ann.IDeserializer.__swig_destroy__.__name__ == "delete_IDeserializer"
+
+
+def test_check_deserializer_swig_ownership(parser):
+    # Check to see that SWIG has ownership for parser. This instructs SWIG to take
+    # ownership of the return value. This allows the value to be automatically
+    # garbage-collected when it is no longer in use
+    assert parser.thisown
+
+
+def test_deserializer_get_network_input_binding_info(parser):
+    # use 0 as a dummy value for layer_id, which is unused in the actual implementation
+    layer_id = 0
+    input_name = 'input_1'
+
+    input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name)
+
+    tensor = input_binding_info[1]
+    assert tensor.GetDataType() == 2
+    assert tensor.GetNumDimensions() == 4
+    assert tensor.GetNumElements() == 784
+    assert tensor.GetQuantizationOffset() == 128
+    assert tensor.GetQuantizationScale() == 0.007843137718737125
+
+
+def test_deserializer_get_network_output_binding_info(parser):
+    # use 0 as a dummy value for layer_id, which is unused in the actual implementation
+    layer_id = 0
+    output_name = "dense/Softmax"
+
+    output_binding_info1 = parser.GetNetworkOutputBindingInfo(layer_id, output_name)
+
+    # Check the tensor info retrieved from GetNetworkOutputBindingInfo
+    tensor1 = output_binding_info1[1]
+
+    assert tensor1.GetDataType() == 2
+    assert tensor1.GetNumDimensions() == 2
+    assert tensor1.GetNumElements() == 10
+    assert tensor1.GetQuantizationOffset() == 0
+    assert tensor1.GetQuantizationScale() == 0.00390625
+
+
+def test_deserializer_filenotfound_exception(shared_data_folder):
+    parser = ann.IDeserializer()
+
+    with pytest.raises(RuntimeError) as err:
+        parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn'))
+
+    # Only check for part of the exception since the exception returns
+    # absolute path which will change on different machines.
+    assert 'Cannot read the file' in str(err.value)
+
+
+def test_deserializer_end_to_end(shared_data_folder):
+    parser = ann.IDeserializer()
+
+    network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, "mock_model.armnn"))
+
+    # use 0 as a dummy value for layer_id, which is unused in the actual implementation
+    layer_id = 0
+    input_name = 'input_1'
+    output_name = 'dense/Softmax'
+
+    input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name)
+
+    preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')]
+
+    options = ann.CreationOptions()
+    runtime = ann.IRuntime(options)
+
+    opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions())
+    assert 0 == len(messages)
+
+    net_id, messages = runtime.LoadNetwork(opt_network)
+    assert "" == messages
+
+    # Load test image data stored in input_lite.npy
+    input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy'))
+    input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data])
+
+    output_tensors = []
+    out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name)
+    out_tensor_info = out_bind_info[1]
+    out_tensor_id = out_bind_info[0]
+    output_tensors.append((out_tensor_id,
+                           ann.Tensor(out_tensor_info)))
+
+    runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)
+
+    output_vectors = []
+    for index, out_tensor in enumerate(output_tensors):
+        output_vectors.append(out_tensor[1].get_memory_area())
+
+    # Load golden output file for result comparison.
+    expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy'))
+
+    # Check that output matches golden output
+    assert (expected_outputs == output_vectors[0]).all()
index 24765c7..0e1d663 100644 (file)
@@ -1,4 +1,4 @@
-# Copyright © 2020 Arm Ltd. All rights reserved.
+# Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
 # SPDX-License-Identifier: MIT
 import inspect
 from typing import Tuple
@@ -6,6 +6,7 @@ from typing import Tuple
 import pytest
 
 import pyarmnn._generated.pyarmnn as generated_armnn
+import pyarmnn._generated.pyarmnn_deserializer as generated_deserializer
 import pyarmnn._generated.pyarmnn_caffeparser as generated_caffe
 import pyarmnn._generated.pyarmnn_onnxparser as generated_onnx
 import pyarmnn._generated.pyarmnn_tfliteparser as generated_tflite
@@ -26,6 +27,7 @@ def get_classes(swig_independent_classes: Tuple):
     ignored_class_names = (*swig_independent_classes, '_SwigNonDynamicMeta')
     return list(filter(lambda x: x[0] not in ignored_class_names,
                        inspect.getmembers(generated_armnn, inspect.isclass) +
+                       inspect.getmembers(generated_deserializer, inspect.isclass) +
                        inspect.getmembers(generated_caffe, inspect.isclass) +
                        inspect.getmembers(generated_tflite, inspect.isclass) +
                        inspect.getmembers(generated_onnx, inspect.isclass) +
diff --git a/python/pyarmnn/test/testdata/shared/deserializer/golden_output_lite.npy b/python/pyarmnn/test/testdata/shared/deserializer/golden_output_lite.npy
new file mode 100644 (file)
index 0000000..099f7fe
Binary files /dev/null and b/python/pyarmnn/test/testdata/shared/deserializer/golden_output_lite.npy differ
diff --git a/python/pyarmnn/test/testdata/shared/deserializer/input_lite.npy b/python/pyarmnn/test/testdata/shared/deserializer/input_lite.npy
new file mode 100644 (file)
index 0000000..5317468
Binary files /dev/null and b/python/pyarmnn/test/testdata/shared/deserializer/input_lite.npy differ
diff --git a/python/pyarmnn/test/testdata/shared/mock_model.armnn b/python/pyarmnn/test/testdata/shared/mock_model.armnn
new file mode 100644 (file)
index 0000000..964550b
Binary files /dev/null and b/python/pyarmnn/test/testdata/shared/mock_model.armnn differ
diff --git a/src/armnnDeserializer/CMakeLists.txt b/src/armnnDeserializer/CMakeLists.txt
new file mode 100755 (executable)
index 0000000..63917fc
--- /dev/null
@@ -0,0 +1,44 @@
+#
+# Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
+# SPDX-License-Identifier: MIT
+#
+if(BUILD_ARMNN_DESERIALIZER)
+    find_program(FLATC flatc
+                 HINTS ${FLATC_DIR}
+                 DOC "Path to 'flatc', the flatbuffers compiler")
+    if (NOT FLATC)
+        message(SEND_ERROR "flatc not found. Specify the full path of the flatc executable with -DFLATC=<flatc path>")
+    endif()
+
+    add_custom_command(
+        # Generate ArmnnSchema_generated.h if it does not exist, or regenerate it when the schema changes
+        OUTPUT ArmnnSchema_generated.h DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../armnnSerializer/ArmnnSchema.fbs
+        COMMAND ${FLATC} -o ${CMAKE_CURRENT_BINARY_DIR} --cpp ${CMAKE_CURRENT_SOURCE_DIR}/../armnnSerializer/ArmnnSchema.fbs
+    )
+
+    set(armnn_deserializer_sources)
+    list(APPEND armnn_deserializer_sources
+        ArmnnSchema_generated.h
+        Deserializer.hpp
+        Deserializer.cpp
+        )
+
+    add_library_ex(armnnDeserializer SHARED ${armnn_deserializer_sources})
+
+    include_directories(SYSTEM "${FLATBUFFERS_INCLUDE_PATH}")
+    set_target_properties(armnnDeserializer PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR})
+    target_include_directories(armnnDeserializer PRIVATE ../armnn)
+    target_include_directories(armnnDeserializer PRIVATE ../armnnUtils)
+
+    # System include to suppress warnings for flatbuffers generated files
+    target_include_directories(armnnDeserializer SYSTEM PRIVATE ${CMAKE_CURRENT_BINARY_DIR})
+
+    target_link_libraries(armnnDeserializer armnn ${FLATBUFFERS_LIBRARY})
+
+    install(TARGETS armnnDeserializer
+            EXPORT armnn-targets
+            LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
+            ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
+    )
+    set_target_properties(armnnDeserializer PROPERTIES VERSION ${GENERIC_LIB_VERSION} SOVERSION ${GENERIC_LIB_SOVERSION} )
+endif()