From bceff2fb3fc68bb0aa88b886900c34b77340c826 Mon Sep 17 00:00:00 2001 From: surmeh01 Date: Thu, 29 Mar 2018 16:29:27 +0100 Subject: [PATCH 1/1] Release 18.03 --- Android.bp | 3 + CMakeLists.txt | 67 +- README.md | 6 + include/armnn/INetwork.hpp | 2 + include/armnn/Types.hpp | 3 + include/armnn/TypesUtils.hpp | 50 + include/armnn/Version.hpp | 2 +- include/armnnTfParser/ITfParser.hpp | 60 + scripts/generate_tensorflow_protobuf.sh | 73 + src/armnn/Graph.cpp | 77 + src/armnn/Graph.hpp | 100 +- src/armnn/Layer.cpp | 11 +- src/armnn/Layer.hpp | 10 + src/armnn/Layers.cpp | 69 +- src/armnn/Layers.hpp | 9 +- src/armnn/Network.cpp | 7 +- src/armnn/Network.hpp | 1 + src/armnn/Optimizer.cpp | 26 +- src/armnn/Optimizer.hpp | 5 +- src/armnn/Runtime.cpp | 30 +- src/armnn/Runtime.hpp | 2 + src/armnn/SerializeLayerParameters.cpp | 156 ++ src/armnn/SerializeLayerParameters.hpp | 73 + src/armnn/backends/ArmComputeTensorUtils.cpp | 7 +- src/armnn/backends/ClWorkloadFactory.cpp | 67 +- src/armnn/backends/ClWorkloadFactory.hpp | 11 +- src/armnn/backends/NeonLayerSupport.cpp | 26 +- src/armnn/backends/NeonWorkloadFactory.cpp | 2 +- src/armnn/backends/NeonWorkloads.hpp | 2 + .../NeonConvolution2dBaseWorkload.cpp | 4 - .../NeonConvolution2dBaseWorkload.hpp | 2 + .../NeonConvolution2dFloat32Workload.cpp | 7 +- .../NeonConvolution2dUint8Workload.cpp | 33 + .../NeonConvolution2dUint8Workload.hpp | 27 + src/armnn/backends/RefWorkloads/Addition.cpp | 6 +- src/armnn/backends/RefWorkloads/Merger.hpp | 1 + src/armnn/backends/RefWorkloads/Multiplication.cpp | 42 +- src/armnn/backends/RefWorkloads/Multiplication.hpp | 12 +- src/armnn/backends/RefWorkloads/Pooling2d.cpp | 4 +- .../RefMultiplicationFloat32Workload.cpp | 7 +- .../RefMultiplicationUint8Workload.cpp | 7 +- src/armnn/backends/RefWorkloads/Splitter.hpp | 1 + src/armnn/backends/WorkloadData.cpp | 17 +- src/armnn/backends/test/ArmComputeCl.cpp | 13 +- src/armnn/backends/test/ArmComputeNeon.cpp | 10 + src/armnn/backends/test/LayerTests.cpp 
| 322 ++- src/armnn/backends/test/LayerTests.hpp | 9 + src/armnn/backends/test/PermuteTestImpl.hpp | 104 + src/armnn/backends/test/Pooling2dTestImpl.hpp | 77 + src/armnn/backends/test/Reference.cpp | 11 + src/armnn/optimizations/Optimization.hpp | 27 +- .../optimizations/OptimizeConsecutiveReshapes.hpp | 4 +- src/armnn/optimizations/SquashEqualSiblings.hpp | 28 +- src/armnn/test/Network_test.cpp | 58 + src/armnn/test/OptimizerTests.cpp | 334 +++ src/armnn/test/RuntimeTests.cpp | 15 +- src/armnnCaffeParser/CaffeSupport.md | 31 + src/armnnCaffeParser/README.md | 5 + src/armnnTfParser/README.md | 5 + src/armnnTfParser/TensorFlowSupport.md | 111 + src/armnnTfParser/TfParser.cpp | 2200 ++++++++++++++++++++ src/armnnTfParser/TfParser.hpp | 199 ++ src/armnnTfParser/test/Activations.cpp | 113 + src/armnnTfParser/test/Addition.cpp | 78 + src/armnnTfParser/test/BiasAdd.cpp | 104 + src/armnnTfParser/test/BroadcastForAdd.cpp | 149 ++ src/armnnTfParser/test/Concat.cpp | 183 ++ src/armnnTfParser/test/ConcatOfConcats.cpp | 316 +++ src/armnnTfParser/test/Constant.cpp | 321 +++ src/armnnTfParser/test/Convolution2d.cpp | 322 +++ src/armnnTfParser/test/DepthwiseConvolution2d.cpp | 166 ++ src/armnnTfParser/test/FullyConnected.cpp | 579 ++++++ src/armnnTfParser/test/FusedBatchNorm.cpp | 175 ++ src/armnnTfParser/test/Identity.cpp | 161 ++ .../test/LocalResponseNormalization.cpp | 121 ++ src/armnnTfParser/test/MultiOutput.cpp | 144 ++ src/armnnTfParser/test/Multiplication.cpp | 172 ++ src/armnnTfParser/test/PassThru.cpp | 52 + src/armnnTfParser/test/Pooling.cpp | 112 + src/armnnTfParser/test/Reshape.cpp | 86 + src/armnnTfParser/test/ResizeBilinear.cpp | 114 + src/armnnTfParser/test/Shape.cpp | 94 + src/armnnTfParser/test/Softmax.cpp | 55 + src/armnnTfParser/test/Squeeze.cpp | 108 + src/armnnTfParser/test/TestDependencies.cpp | 296 +++ src/armnnTfParser/test/TestMultiInputsOutputs.cpp | 92 + src/armnnUtils/DotSerializer.cpp | 219 ++ src/armnnUtils/DotSerializer.hpp | 131 ++ 
tests/CMakeLists.txt | 49 +- tests/ExecuteNetwork/ExecuteNetwork.cpp | 8 + tests/ImageNetDatabase.cpp | 7 - tests/InferenceTest.inl | 20 + tests/InferenceTestImage.cpp | 31 + tests/InferenceTestImage.hpp | 6 + tests/MobileNetDatabase.cpp | 133 ++ tests/MobileNetDatabase.hpp | 36 + tests/TfCifar10-Armnn/TfCifar10-Armnn.cpp | 17 + tests/TfCifar10-Armnn/cifar10_tf.prototxt | 915 -------- tests/TfInceptionV3-Armnn/TfInceptionV3-Armnn.cpp | 23 + tests/TfInceptionV3-Armnn/Validation.txt | 201 ++ tests/TfMnist-Armnn/TfMnist-Armnn.cpp | 17 + tests/TfMnist-Armnn/simple_mnist_tf.prototxt | 117 -- tests/TfMobileNet-Armnn/TfMobileNet-Armnn.cpp | 50 + tests/TfMobileNet-Armnn/Validation.txt | 201 ++ tests/TfMobileNet-Armnn/labels.txt | 1001 +++++++++ 105 files changed, 10715 insertions(+), 1270 deletions(-) create mode 100644 include/armnnTfParser/ITfParser.hpp create mode 100755 scripts/generate_tensorflow_protobuf.sh create mode 100644 src/armnn/SerializeLayerParameters.cpp create mode 100644 src/armnn/SerializeLayerParameters.hpp create mode 100644 src/armnn/backends/NeonWorkloads/NeonConvolution2dUint8Workload.cpp create mode 100644 src/armnn/backends/NeonWorkloads/NeonConvolution2dUint8Workload.hpp create mode 100644 src/armnn/test/OptimizerTests.cpp create mode 100644 src/armnnCaffeParser/CaffeSupport.md create mode 100644 src/armnnCaffeParser/README.md create mode 100644 src/armnnTfParser/README.md create mode 100644 src/armnnTfParser/TensorFlowSupport.md create mode 100644 src/armnnTfParser/TfParser.cpp create mode 100644 src/armnnTfParser/TfParser.hpp create mode 100644 src/armnnTfParser/test/Activations.cpp create mode 100644 src/armnnTfParser/test/Addition.cpp create mode 100644 src/armnnTfParser/test/BiasAdd.cpp create mode 100644 src/armnnTfParser/test/BroadcastForAdd.cpp create mode 100644 src/armnnTfParser/test/Concat.cpp create mode 100644 src/armnnTfParser/test/ConcatOfConcats.cpp create mode 100644 src/armnnTfParser/test/Constant.cpp create mode 100644 
src/armnnTfParser/test/Convolution2d.cpp create mode 100644 src/armnnTfParser/test/DepthwiseConvolution2d.cpp create mode 100644 src/armnnTfParser/test/FullyConnected.cpp create mode 100644 src/armnnTfParser/test/FusedBatchNorm.cpp create mode 100644 src/armnnTfParser/test/Identity.cpp create mode 100644 src/armnnTfParser/test/LocalResponseNormalization.cpp create mode 100644 src/armnnTfParser/test/MultiOutput.cpp create mode 100644 src/armnnTfParser/test/Multiplication.cpp create mode 100644 src/armnnTfParser/test/PassThru.cpp create mode 100644 src/armnnTfParser/test/Pooling.cpp create mode 100644 src/armnnTfParser/test/Reshape.cpp create mode 100644 src/armnnTfParser/test/ResizeBilinear.cpp create mode 100644 src/armnnTfParser/test/Shape.cpp create mode 100644 src/armnnTfParser/test/Softmax.cpp create mode 100644 src/armnnTfParser/test/Squeeze.cpp create mode 100644 src/armnnTfParser/test/TestDependencies.cpp create mode 100644 src/armnnTfParser/test/TestMultiInputsOutputs.cpp create mode 100644 src/armnnUtils/DotSerializer.cpp create mode 100644 src/armnnUtils/DotSerializer.hpp create mode 100644 tests/MobileNetDatabase.cpp create mode 100644 tests/MobileNetDatabase.hpp create mode 100644 tests/TfCifar10-Armnn/TfCifar10-Armnn.cpp delete mode 100644 tests/TfCifar10-Armnn/cifar10_tf.prototxt create mode 100644 tests/TfInceptionV3-Armnn/TfInceptionV3-Armnn.cpp create mode 100644 tests/TfInceptionV3-Armnn/Validation.txt create mode 100644 tests/TfMnist-Armnn/TfMnist-Armnn.cpp delete mode 100644 tests/TfMnist-Armnn/simple_mnist_tf.prototxt create mode 100644 tests/TfMobileNet-Armnn/TfMobileNet-Armnn.cpp create mode 100644 tests/TfMobileNet-Armnn/Validation.txt create mode 100644 tests/TfMobileNet-Armnn/labels.txt diff --git a/Android.bp b/Android.bp index 836c6ce..2ab4970 100644 --- a/Android.bp +++ b/Android.bp @@ -16,6 +16,7 @@ cc_library_static { srcs: [ "src/armnnUtils/Logging.cpp", "src/armnnUtils/Permute.cpp", + "src/armnnUtils/DotSerializer.cpp", 
"src/armnn/backends/ArmComputeTensorUtils.cpp", "src/armnn/backends/ClWorkloads/ClActivationFloat32Workload.cpp", "src/armnn/backends/ClWorkloads/ClActivationUint8Workload.cpp", @@ -54,6 +55,7 @@ cc_library_static { "src/armnn/backends/NeonWorkloads/NeonConstantUint8Workload.cpp", "src/armnn/backends/NeonWorkloads/NeonConvolution2dBaseWorkload.cpp", "src/armnn/backends/NeonWorkloads/NeonConvolution2dFloat32Workload.cpp", + "src/armnn/backends/NeonWorkloads/NeonConvolution2dUint8Workload.cpp", "src/armnn/backends/NeonWorkloads/NeonDepthwiseConvolutionFloat32Workload.cpp", "src/armnn/backends/NeonWorkloads/NeonDepthwiseConvolutionUint8Workload.cpp", "src/armnn/backends/NeonWorkloads/NeonFloorFloat32Workload.cpp", @@ -127,6 +129,7 @@ cc_library_static { "src/armnn/Graph.cpp", "src/armnn/Optimizer.cpp", "src/armnn/Runtime.cpp", + "src/armnn/SerializeLayerParameters.cpp", "src/armnn/InternalTypes.cpp", "src/armnn/Layer.cpp", "src/armnn/Layers.cpp", diff --git a/CMakeLists.txt b/CMakeLists.txt index d09e549..6a1e6a4 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -25,8 +25,10 @@ list(APPEND armnnUtils_sources src/armnnUtils/Permute.hpp src/armnnUtils/Logging.cpp src/armnnUtils/Permute.cpp + src/armnnUtils/DotSerializer.cpp + src/armnnUtils/DotSerializer.hpp ) -if(BUILD_CAFFE_PARSER) +if(BUILD_TF_PARSER OR BUILD_CAFFE_PARSER) list(APPEND armnnUtils_sources src/armnnUtils/ParserPrototxtFixture.hpp ) @@ -59,6 +61,29 @@ if(BUILD_CAFFE_PARSER) target_link_libraries(armnnCaffeParser ${PROTOBUF_LIBRARIES}) endif() +if(BUILD_TF_PARSER) + set(armnn_tf_parser_sources) + list(APPEND armnn_tf_parser_sources + include/armnnTfParser/ITfParser.hpp + src/armnnTfParser/TfParser.hpp + src/armnnTfParser/TfParser.cpp + ${TF_PROTOBUFS} + ) + # The generated tensorflow protobuf .cc files are not warning clean and we can't fix them. 
+ if(COMPILER_IS_GNU_LIKE) + set_source_files_properties(${TF_PROTOBUFS} PROPERTIES COMPILE_FLAGS "-Wno-conversion -Wno-sign-conversion") + endif() + + add_library_ex(armnnTfParser SHARED ${armnn_tf_parser_sources}) + + target_include_directories(armnnTfParser PRIVATE src/armnnUtils) + + target_link_libraries(armnnTfParser armnn) + + # Protobuf (use the specific version tensorflow wants) + target_link_libraries(armnnTfParser ${PROTOBUF_LIBRARIES}) +endif() + # ArmNN source files required for all build options list(APPEND armnn_sources include/armnn/ArmNN.hpp @@ -199,6 +224,8 @@ list(APPEND armnn_sources src/armnn/Layers.cpp src/armnn/Runtime.hpp src/armnn/Runtime.cpp + src/armnn/SerializeLayerParameters.cpp + src/armnn/SerializeLayerParameters.hpp src/armnn/Descriptors.cpp src/armnn/LoadedNetwork.hpp src/armnn/LoadedNetwork.cpp @@ -247,6 +274,8 @@ if(ARMCOMPUTENEON) src/armnn/backends/NeonWorkloads/NeonConvolution2dBaseWorkload.hpp src/armnn/backends/NeonWorkloads/NeonConvolution2dFloat32Workload.cpp src/armnn/backends/NeonWorkloads/NeonConvolution2dFloat32Workload.hpp + src/armnn/backends/NeonWorkloads/NeonConvolution2dUint8Workload.cpp + src/armnn/backends/NeonWorkloads/NeonConvolution2dUint8Workload.hpp src/armnn/backends/NeonWorkloads/NeonDepthwiseConvolutionFloat32Workload.cpp src/armnn/backends/NeonWorkloads/NeonDepthwiseConvolutionFloat32Workload.hpp src/armnn/backends/NeonWorkloads/NeonDepthwiseConvolutionUint8Workload.cpp @@ -380,6 +409,9 @@ install(TARGETS armnn DESTINATION ${CMAKE_INSTALL_PREFIX}/lib) if(BUILD_CAFFE_PARSER) install(TARGETS armnnCaffeParser DESTINATION ${CMAKE_INSTALL_PREFIX}/lib) endif() +if(BUILD_TF_PARSER) + install(TARGETS armnnTfParser DESTINATION ${CMAKE_INSTALL_PREFIX}/lib) +endif() install(DIRECTORY include/ DESTINATION ${CMAKE_INSTALL_PREFIX}/include) target_link_libraries(armnn ${Boost_LOG_LIBRARY} ${Boost_THREAD_LIBRARY} ${Boost_SYSTEM_LIBRARY}) @@ -404,6 +436,7 @@ if(BUILD_UNIT_TESTS) src/armnn/test/EndToEndTest.cpp 
src/armnn/test/UtilsTests.cpp src/armnn/test/GraphTests.cpp + src/armnn/test/OptimizerTests.cpp src/armnn/test/RuntimeTests.cpp src/armnn/test/CreateWorkload.hpp src/armnn/test/TensorTest.cpp @@ -451,6 +484,34 @@ if(BUILD_UNIT_TESTS) src/armnn/backends/test/MemCopyTests.cpp) endif() + if(BUILD_TF_PARSER) + list(APPEND unittest_sources + src/armnnTfParser/test/Activations.cpp + src/armnnTfParser/test/Addition.cpp + src/armnnTfParser/test/BiasAdd.cpp + src/armnnTfParser/test/BroadcastForAdd.cpp + src/armnnTfParser/test/Convolution2d.cpp + src/armnnTfParser/test/Concat.cpp + src/armnnTfParser/test/ConcatOfConcats.cpp + src/armnnTfParser/test/DepthwiseConvolution2d.cpp + src/armnnTfParser/test/FusedBatchNorm.cpp + src/armnnTfParser/test/Identity.cpp + src/armnnTfParser/test/LocalResponseNormalization.cpp + src/armnnTfParser/test/Multiplication.cpp + src/armnnTfParser/test/MultiOutput.cpp + src/armnnTfParser/test/PassThru.cpp + src/armnnTfParser/test/Pooling.cpp + src/armnnTfParser/test/Reshape.cpp + src/armnnTfParser/test/ResizeBilinear.cpp + src/armnnTfParser/test/Shape.cpp + src/armnnTfParser/test/Softmax.cpp + src/armnnTfParser/test/TestDependencies.cpp + src/armnnTfParser/test/FullyConnected.cpp + src/armnnTfParser/test/Constant.cpp + src/armnnTfParser/test/TestMultiInputsOutputs.cpp + src/armnnTfParser/test/Squeeze.cpp) + endif() + if(BUILD_CAFFE_PARSER) list(APPEND unittest_sources src/armnnCaffeParser/test/TestAdd.cpp @@ -479,6 +540,10 @@ if(BUILD_UNIT_TESTS) target_link_libraries(UnitTests ${CMAKE_THREAD_LIBS_INIT}) target_link_libraries(UnitTests ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) + if(BUILD_TF_PARSER) + target_link_libraries(UnitTests armnnTfParser) + endif() + if(BUILD_CAFFE_PARSER) target_link_libraries(UnitTests armnnCaffeParser) endif() diff --git a/README.md b/README.md index e69de29..455fe7a 100644 --- a/README.md +++ b/README.md @@ -0,0 +1,6 @@ +# Arm NN + +For more information about Arm NN, see: 
https://developer.arm.com/products/processors/machine-learning/arm-nn + +There is a getting started guide here: https://developer.arm.com/technologies/machine-learning-on-arm/developer-material/how-to-guides/configuring-the-arm-nn-sdk-build-environment-for-caffe + diff --git a/include/armnn/INetwork.hpp b/include/armnn/INetwork.hpp index 8545629..5cff810 100644 --- a/include/armnn/INetwork.hpp +++ b/include/armnn/INetwork.hpp @@ -63,6 +63,7 @@ public: virtual const IOutputSlot& GetOutputSlot(unsigned int index) const = 0; virtual IOutputSlot& GetOutputSlot(unsigned int index) = 0; + virtual LayerGuid GetGuid() const = 0; protected: ~IConnectableLayer() {} // Objects are not deletable via the handle }; @@ -265,6 +266,7 @@ public: static void Destroy(IOptimizedNetwork* network); virtual Status PrintGraph() = 0; + virtual Status SerializeToDot(std::ostream& stream) const = 0; protected: ~IOptimizedNetwork() {} diff --git a/include/armnn/Types.hpp b/include/armnn/Types.hpp index e1aa393..c9a4bf1 100644 --- a/include/armnn/Types.hpp +++ b/include/armnn/Types.hpp @@ -152,4 +152,7 @@ private: SizeType m_NumDimMappings; }; +// Define LayerGuid type. 
+using LayerGuid = unsigned int; + } diff --git a/include/armnn/TypesUtils.hpp b/include/armnn/TypesUtils.hpp index a851b66..ba18e00 100644 --- a/include/armnn/TypesUtils.hpp +++ b/include/armnn/TypesUtils.hpp @@ -34,6 +34,56 @@ constexpr char const* GetComputeDeviceAsCString(Compute compute) } } +constexpr char const* GetActivationFunctionAsCString(ActivationFunction activation) +{ + switch (activation) + { + case ActivationFunction::Sigmoid: return "Sigmoid"; + case ActivationFunction::TanH: return "TanH"; + case ActivationFunction::Linear: return "Linear"; + case ActivationFunction::ReLu: return "ReLu"; + case ActivationFunction::BoundedReLu: return "BoundedReLu"; + case ActivationFunction::SoftReLu: return "SoftReLu"; + case ActivationFunction::LeakyReLu: return "LeakyReLu"; + case ActivationFunction::Abs: return "Abs"; + case ActivationFunction::Sqrt: return "Sqrt"; + case ActivationFunction::Square: return "Square"; + default: return "Unknown"; + } +} + +constexpr char const* GetPoolingAlgorithmAsCString(PoolingAlgorithm pooling) +{ + switch (pooling) + { + case PoolingAlgorithm::Average: return "Average"; + case PoolingAlgorithm::Max: return "Max"; + case PoolingAlgorithm::L2: return "L2"; + default: return "Unknown"; + } +} + +constexpr char const* GetOutputShapeRoundingAsCString(OutputShapeRounding rounding) +{ + switch (rounding) + { + case OutputShapeRounding::Ceiling: return "Ceiling"; + case OutputShapeRounding::Floor: return "Floor"; + default: return "Unknown"; + } +} + + +constexpr char const* GetPaddingMethodAsCString(PaddingMethod method) +{ + switch (method) + { + case PaddingMethod::Exclude: return "Exclude"; + case PaddingMethod::IgnoreValue: return "IgnoreValue"; + default: return "Unknown"; + } +} + constexpr unsigned int GetDataTypeSize(DataType dataType) { switch (dataType) diff --git a/include/armnn/Version.hpp b/include/armnn/Version.hpp index 6ce8256..5fdcf8d 100644 --- a/include/armnn/Version.hpp +++ b/include/armnn/Version.hpp @@ -9,4 
+9,4 @@ // YYYY = 4-digit year number // MM = 2-digit month number // PP = 2-digit patch number -#define ARMNN_VERSION "20180200" +#define ARMNN_VERSION "20180300" diff --git a/include/armnnTfParser/ITfParser.hpp b/include/armnnTfParser/ITfParser.hpp new file mode 100644 index 0000000..a6f56c8 --- /dev/null +++ b/include/armnnTfParser/ITfParser.hpp @@ -0,0 +1,60 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// +#pragma once + +#include "armnn/Types.hpp" +#include "armnn/Tensor.hpp" +#include "armnn/INetwork.hpp" + +#include +#include +#include +#include + +namespace armnnTfParser +{ + +using BindingPointInfo = std::pair; + +class ITfParser; +using ITfParserPtr = std::unique_ptr; + +/// parses a directed acyclic graph from a tensorflow protobuf file +class ITfParser +{ +public: + static ITfParser* CreateRaw(); + static ITfParserPtr Create(); + static void Destroy(ITfParser* parser); + + /// Create the network from a protobuf text file on disk + virtual armnn::INetworkPtr CreateNetworkFromTextFile( + const char* graphFile, + const std::map& inputShapes, + const std::vector& requestedOutputs) = 0; + + /// Create the network from a protobuf binary file on disk + virtual armnn::INetworkPtr CreateNetworkFromBinaryFile( + const char* graphFile, + const std::map& inputShapes, + const std::vector& requestedOutputs) = 0; + + /// Create the network directly from protobuf text in a string. 
Useful for debugging/testing + virtual armnn::INetworkPtr CreateNetworkFromString( + const char* protoText, + const std::map& inputShapes, + const std::vector& requestedOutputs) = 0; + + /// Retrieve binding info (layer id and tensor info) for the network input identified by the given layer name + virtual BindingPointInfo GetNetworkInputBindingInfo(const std::string& name) const = 0; + + /// Retrieve binding info (layer id and tensor info) for the network output identified by the given layer name + virtual BindingPointInfo GetNetworkOutputBindingInfo(const std::string& name) const = 0; + +protected: + virtual ~ITfParser() {}; +}; + +} diff --git a/scripts/generate_tensorflow_protobuf.sh b/scripts/generate_tensorflow_protobuf.sh new file mode 100755 index 0000000..52c1827 --- /dev/null +++ b/scripts/generate_tensorflow_protobuf.sh @@ -0,0 +1,73 @@ +#!/bin/sh +# +# Copyright © 2017 Arm Ltd. All rights reserved. +# See LICENSE file in the project root for full license information. +# + +THIS_SCRIPT=$0 +OUTPUT_DIR=$1 +PROTOBUF_INSTALL_DIR=$2 + +usage() +{ + echo + echo "Usage: ${THIS_SCRIPT} [PROTOBUF_INSTALL_DIR]" + echo + echo " is the location where the generated files will be placed" + echo " [PROTOBUF_INSTALL_DIR] the location of the protobuf installation" + echo +} + +if [ "x$OUTPUT_DIR" = "x" ] +then + usage + exit 1 +fi + +mkdir -p ${OUTPUT_DIR} +ERR=$? +if [ $ERR -ne 0 ] +then + echo + echo "Cannot create output dir: ${OUTPUT_DIR}" + echo "mkdir returned: $ERR" + echo + usage + exit 1 +fi + + +if [ "x${PROTOBUF_INSTALL_DIR}" = "x" ] +then + PROTOBUF_INSTALL_DIR=/usr/local +fi + +if [ ! -x "${PROTOBUF_INSTALL_DIR}/bin/protoc" ] +then + echo + echo "No usable protocol buffer (protoc) compiler found in ${PROTOBUF_INSTALL_DIR}/bin/" + echo "You can specify the location of the protobuf installation as the second" + echo "argument of ${THIS_SCRIPT}." 
+ usage + exit 1 +fi + + +TF_PROTO_FILES=tensorflow/contrib/makefile/tf_proto_files.txt +if [ -r $TF_PROTO_FILES ] +then + OLD_LD_LIBRARY_PATH=$LD_LIBRARY_PATH + for i in `cat $TF_PROTO_FILES` + do + LD_LIBRARY_PATH=$OLD_LD_LIBRARY_PATH:${PROTOBUF_INSTALL_DIR}/lib \ + $PROTOBUF_INSTALL_DIR/bin/protoc $i \ + --proto_path=. \ + --proto_path=${PROTOBUF_INSTALL_DIR}/include \ + --cpp_out $OUTPUT_DIR + done +else + echo "Couldn't find $TF_PROTO_FILES. This script should be run from the" + echo "tensorflow source directory." + exit 1 +fi + diff --git a/src/armnn/Graph.cpp b/src/armnn/Graph.cpp index 97f702e..af3b17e 100644 --- a/src/armnn/Graph.cpp +++ b/src/armnn/Graph.cpp @@ -14,6 +14,9 @@ #include #include +#include +#include + namespace armnn { @@ -71,6 +74,80 @@ Status Graph::Print() const return Status::Success; } +Status Graph::SerializeToDot(std::ostream& stream) +{ + { + DotGraph graph(stream, "Optimized"); + + { + // Default node attributes: + DotDefaults nodes(stream, "node"); + nodes.GetAttributeSet() + .AddAttribute("shape", "record"); + } + + { + // Default edge attributes: + DotDefaults edges(stream, "edge"); + edges.GetAttributeSet() + .AddAttribute("fontsize", 8) + .AddAttribute("fontcolor", "blue") + .AddAttribute("fontname", "arial-bold"); + } + + // First declare the nodes + for (auto&& layer : m_Layers) + { + DotNode node(stream, layer->GetGuid(), GetLayerTypeAsCString(layer->GetType())); + // Extract the layer parameters + ParameterStringifyFunction extractParams = [&node](const std::string & name, const std::string & value){ + node.GetContents().AddContent(name + " : " + value); + }; + layer->SerializeLayerParameters(extractParams); + } + + // Second declare the edges + for (auto&& layer : m_Layers) + { + LayerGuid toId = layer->GetGuid(); + + for (unsigned int i=0;iGetNumInputSlots(); i++) + { + OutputSlot* outputSlot = static_cast(layer->GetInputSlot(i).GetConnection()); + LayerGuid fromId = outputSlot->GetOwningLayer().GetGuid(); + DotEdge 
edge(stream, fromId, toId); + + // Now Print the tensor shape on the edge + { + // Construct the label attribute with HTML markup + std::stringstream ss; + { + ss << "< ["; + const TensorShape& shape = outputSlot->GetTensorInfo().GetShape(); + for (unsigned int i = 0; i < shape.GetNumDimensions(); i++) + { + if (i != 0) + { + ss << ","; + } + ss << shape[i]; + } + ss << "] >"; + } + + edge.GetAttributeSet().AddAttribute("label", ss); + } + } + } + } + + if (stream.bad()) + { + return Status::Failure; + } + return Status::Success; +} + Status Graph::AllocateDynamicBuffers() { for (auto&& layer : m_Layers) diff --git a/src/armnn/Graph.hpp b/src/armnn/Graph.hpp index 8888034..34aefbf 100644 --- a/src/armnn/Graph.hpp +++ b/src/armnn/Graph.hpp @@ -92,6 +92,8 @@ public: Status Print() const; + Status SerializeToDot(std::ostream& stream); + /// Adds a new layer of type LaterType to the graph constructed with the arguments passed. template LayerT* AddLayer(Args&&... args); @@ -121,6 +123,11 @@ public: /// Return const iterator pointing to end of list. Lowercase for range-based for loops. ConstIterator end() const { return {m_Layers.end(), &PtrCast}; } + /// Return const iterator pointing to begin of list. Lowercase for range-based for loops. + ConstIterator cbegin() const { return begin(); } + /// Return const iterator pointing to end of list. Lowercase for range-based for loops. + ConstIterator cend() const { return end(); } + /// Sort layers in topological order and return this. 
Graph& TopologicalSort() { const_cast(this)->TopologicalSort(); return *this; } const Graph& TopologicalSort() const; @@ -154,13 +161,27 @@ private: template class LayerInGraph; + Iterator ForwardToEndOfInputs(Iterator it) const + { + while ((it != m_Layers.end()) && ((*it)->GetType() == LayerType::Input)) + { + ++it; + } + return it; + } + + Iterator RewindToBeginOfOutputs(Iterator it) const + { + while ((it != m_Layers.begin()) && ((*std::prev(it))->GetType() == LayerType::Output)) + { + --it; + } + return it; + } + /// Get the position of a layer in the graph. Iterator GetPosInGraph(Layer& layer); - /// Adds a new layer of type LaterType to the graph constructed with the arguments passed. - template - LayerInGraph* AddLayerImpl(Iterator insertBefore, Args&&... args); - std::unordered_set m_InputIds; std::unordered_set m_OutputIds; std::unordered_map m_PosInGraphMap; @@ -197,8 +218,19 @@ class Graph::LayerInGraph final : public LayerInGraphBase { public: template + LayerInGraph(Graph& graph, Args&&... args) + : LayerInGraphBase(graph, + // Insert at the back of the intermediate layers (before outputs). + std::prev(graph.end(), IteratorDifference(graph.GetNumOutputs())), + std::forward(args)...) + { + } + template LayerInGraph(Graph& graph, Iterator insertBefore, Args&&... args) - : LayerInGraphBase(graph, insertBefore, std::forward(args)...) + : LayerInGraphBase(graph, + // Make sure it's inserted after all inputs and before all outputs. + graph.ForwardToEndOfInputs(graph.RewindToBeginOfOutputs(insertBefore)), + std::forward(args)...) { } }; @@ -209,8 +241,11 @@ class Graph::LayerInGraph final : public LayerInGraphBase - LayerInGraph(Graph& graph, Iterator insertBefore, Args&&... args) - : LayerInGraphBase(graph, insertBefore, std::forward(args)...) + LayerInGraph(Graph& graph, Args&&... args) + : LayerInGraphBase(graph, + // Always add to the back of the inputs. + std::next(graph.begin(), IteratorDifference(graph.GetNumInputs())), + std::forward(args)...) 
{ const bool isNewId = m_Graph.m_InputIds.emplace(GetBindingId()).second; if (!isNewId) @@ -218,6 +253,12 @@ public: throw InvalidArgumentException("A layer already exists with the specified id"); } } + template + LayerInGraph(Graph& graph, Iterator insertBefore, Args&&... args) + // Ignore insertBefore. Always add to the back of the inputs. + : LayerInGraph(graph, std::forward(args)...) + { + } ~LayerInGraph() override { const size_t numErased = m_Graph.m_InputIds.erase(GetBindingId()); @@ -232,8 +273,11 @@ class Graph::LayerInGraph final : public LayerInGraphBase - LayerInGraph(Graph& graph, Iterator insertBefore, Args&&... args) - : LayerInGraphBase(graph, insertBefore, std::forward(args)...) + LayerInGraph(Graph& graph, Args&&... args) + : LayerInGraphBase(graph, + // Always add to the back of the outputs. + graph.end(), + std::forward(args)...) { const bool isNewId = m_Graph.m_OutputIds.emplace(GetBindingId()).second; if (!isNewId) @@ -257,42 +301,22 @@ inline Graph::Iterator Graph::GetPosInGraph(Layer& layer) } template -inline Graph::LayerInGraph* Graph::AddLayerImpl(Iterator insertBefore, Args&&... args) -{ - return new LayerInGraph(*this, insertBefore, std::forward(args)...); -} - -/// Inputs are inserted at the front of the list, to keep the order correct if the list is sorted. -/// Outputs are inserted at the back of the list, to keep the order correct if the list is sorted. -/// Other layers are inserted before existing outputs, so the latter remain at the back of the list. -template inline LayerT* Graph::AddLayer(Args&&... 
args) { - switch (LayerEnumOf()) - { - case LayerType::Input: - { - return AddLayerImpl(begin(), std::forward(args)...); - } - case LayerType::Output: - { - return AddLayerImpl(end(), std::forward(args)...); - } - default: - { - m_LayersInOrder = false; - const auto pos = std::prev(end(), IteratorDifference(GetNumOutputs())); - return AddLayerImpl(pos, std::forward(args)...); - } - } + m_LayersInOrder = m_LayersInOrder && + ((LayerEnumOf() == LayerType::Input) || (LayerEnumOf() == LayerType::Output)); + return new LayerInGraph(*this, std::forward(args)...); } template inline LayerT* Graph::InsertNewLayer(InputSlot& insertBefore, Args&&... args) { - // Insert before the child layer so topological order is kept. - const Iterator pos = GetPosInGraph(insertBefore.GetOwningLayer()); - LayerT* const layer = AddLayerImpl(pos, std::forward(args)...); + // Insert after the parent if any, or before the child otherwise, so topological order is kept. + OutputSlot* parentOut = insertBefore.GetConnectedOutputSlot(); + const Iterator pos = (parentOut != nullptr) + ? std::next(GetPosInGraph(parentOut->GetOwningLayer())) + : GetPosInGraph(insertBefore.GetOwningLayer()); + LayerT* const layer = new LayerInGraph(*this, pos, std::forward(args)...); insertBefore.Insert(*layer); return layer; } diff --git a/src/armnn/Layer.cpp b/src/armnn/Layer.cpp index 20a8ba4..fcf0656 100644 --- a/src/armnn/Layer.cpp +++ b/src/armnn/Layer.cpp @@ -18,7 +18,6 @@ namespace armnn void InputSlot::Insert(Layer& layer) { - BOOST_ASSERT(layer.GetNumInputSlots() <= 1); BOOST_ASSERT(layer.GetNumOutputSlots() == 1); OutputSlot* const prevSlot = GetConnectedOutputSlot(); @@ -115,11 +114,21 @@ void OutputSlot::ValidateConnectionIndex(unsigned int index) const } } +namespace { +LayerGuid GenerateLayerGuid() +{ + //Note: Not thread safe. 
+ static LayerGuid newGuid=0; + return newGuid++; +} +} //namespace + Layer::Layer(unsigned int numInputSlots, unsigned int numOutputSlots, LayerType type, const char* name) : m_OutputHandlers(numOutputSlots) , m_LayerName(name ? name : "") , m_Type(type) , m_ComputeDevice(Compute::Undefined) +, m_Guid(GenerateLayerGuid()) { m_InputSlots.reserve(numInputSlots); for (unsigned int i = 0; i < numInputSlots; ++i) diff --git a/src/armnn/Layer.hpp b/src/armnn/Layer.hpp index 1160f0a..f9f2f22 100644 --- a/src/armnn/Layer.hpp +++ b/src/armnn/Layer.hpp @@ -10,6 +10,7 @@ #include "backends/WorkloadDataCollector.hpp" #include "backends/WorkloadInfo.hpp" #include "InternalTypes.hpp" +#include "SerializeLayerParameters.hpp" #include #include @@ -218,6 +219,10 @@ public: virtual void ValidateTensorShapesFromInputs() = 0; + /// Helper to serialize the layer parameters to string + /// (currently used in DotSerializer and company) + virtual void SerializeLayerParameters(ParameterStringifyFunction & fn) const {} + // IConnectableLayer const char* GetName() const override { return m_LayerName.c_str(); } @@ -230,6 +235,9 @@ public: const OutputSlot& GetOutputSlot(unsigned int index = 0) const override { return m_OutputSlots.at(index); } OutputSlot& GetOutputSlot(unsigned int index = 0) override { return m_OutputSlots.at(index); } + void SetGuid(LayerGuid guid) { m_Guid = guid; } + LayerGuid GetGuid() const final { return m_Guid; } + protected: // Graph needs access to the virtual destructor friend class Graph; @@ -281,6 +289,8 @@ private: /// Used for sorting mutable LayerPriority m_Priority = 0; mutable bool m_Visiting = false; + + LayerGuid m_Guid; }; // A layer user-provided data can be bound to (e.g. 
inputs, outputs) diff --git a/src/armnn/Layers.cpp b/src/armnn/Layers.cpp index ddbc7d2..48a02ab 100644 --- a/src/armnn/Layers.cpp +++ b/src/armnn/Layers.cpp @@ -11,6 +11,8 @@ #include "Permute.hpp" +#include + namespace armnn { @@ -21,6 +23,7 @@ LayerType* Layer::CloneBase(Graph& graph, Params&& ... params) const LayerType* const layer = graph.AddLayer(std::forward(params)...); layer->SetComputeDevice(m_ComputeDevice); + layer->SetGuid(GetGuid()); return layer; } @@ -82,12 +85,11 @@ void AdditionLayer::ValidateTensorShapesFromInputs() unsigned int dim1 = input1.GetShape()[i]; if (dim0 != dim1) { - BOOST_ASSERT_MSG(dim0 == 1 || dim1 == 1, "Dimensions should either match or one should be one length"); + BOOST_ASSERT_MSG(dim0 == 1 || dim1 == 1, "Dimensions should either match or one should be of size 1."); } } #endif - for (unsigned int i = 0; i < numDims; i++) { unsigned int dim0 = input0.GetShape()[i]; @@ -439,14 +441,31 @@ void MergerLayer::CreateTensorHandles(Graph& graph, const IWorkloadFactory& fact m_OutputHandlers[0].CreateTensorHandles(factory); if (factory.SupportsSubTensors()) { - const unsigned int numInputSlots = GetNumInputSlots(); - for (unsigned int i = 0; i < numInputSlots; ++i) + std::queue m_MergerLayers; + + m_MergerLayers.push(this); + while (!m_MergerLayers.empty()) { - OutputHandler& outputHandler = GetInputSlot(i).GetConnectedOutputSlot()->GetOutputHandler(); + MergerLayer* currentLayer = m_MergerLayers.front(); + ITensorHandle* parentTensor = currentLayer->GetOutputHandler(0).GetData(); - outputHandler.SetData(factory.CreateSubTensorHandle(*m_OutputHandlers[0].GetData(), - outputHandler.GetTensorInfo().GetShape(), - m_Param.GetViewOrigin(i))); + m_MergerLayers.pop(); + + const unsigned int numInputSlots = currentLayer->GetNumInputSlots(); + for (unsigned int i = 0; i < numInputSlots; ++i) + { + OutputSlot* slot = currentLayer->GetInputSlot(i).GetConnectedOutputSlot(); + OutputHandler& outputHandler = slot->GetOutputHandler(); + 
outputHandler.SetData(factory.CreateSubTensorHandle(*parentTensor, + outputHandler.GetTensorInfo().GetShape(), + currentLayer->m_Param.GetViewOrigin(i))); + + Layer& inputLayer = slot->GetOwningLayer(); + if (inputLayer.GetType() == LayerType::Merger) + { + m_MergerLayers.push(boost::polymorphic_downcast(&inputLayer)); + } + } } } } @@ -568,12 +587,36 @@ MultiplicationLayer* MultiplicationLayer::Clone(Graph& graph) const void MultiplicationLayer::ValidateTensorShapesFromInputs() { - ConditionalThrow(GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() == - GetInputSlot(1).GetConnection()->GetTensorInfo().GetShape(), - "MultiplicationLayer: Inputs must match"); + auto& input0 = GetInputSlot(0).GetConnection()->GetTensorInfo(); + auto& input1 = GetInputSlot(1).GetConnection()->GetTensorInfo(); + + // Get the max of the inputs + BOOST_ASSERT(input0.GetNumDimensions() == input1.GetNumDimensions()); + unsigned int numDims = input0.GetNumDimensions(); + std::vector dims(numDims); + + // validate inputs are broadcast compatible +#if !NDEBUG + for (unsigned int i = 0; i < numDims; i++) + { + unsigned int dim0 = input0.GetShape()[i]; + unsigned int dim1 = input1.GetShape()[i]; + if (dim0 != dim1) + { + BOOST_ASSERT_MSG(dim0 == 1 || dim1 == 1, "Dimensions should either match or one should be of size 1."); + } + } +#endif - TensorInfo infoOut(GetInputSlot(0).GetConnection()->GetTensorInfo()); - ConditionalThrow(GetOutputSlot(0).ValidateTensorShape(infoOut.GetShape()), + for (unsigned int i = 0; i < numDims; i++) + { + unsigned int dim0 = input0.GetShape()[i]; + unsigned int dim1 = input1.GetShape()[i]; + dims[i] = std::max(dim0, dim1); + } + + TensorShape outShape(numDims, dims.data()); + ConditionalThrow(GetOutputSlot(0).ValidateTensorShape(outShape), "MultiplicationLayer: TensorShape set on OutputSlot[0] does not match the inferred shape."); } diff --git a/src/armnn/Layers.hpp b/src/armnn/Layers.hpp index 5a1e3ca..cb460e1 100644 --- a/src/armnn/Layers.hpp +++ 
b/src/armnn/Layers.hpp @@ -22,10 +22,17 @@ template class LayerWithParameters : public Layer { public: - typedef Parameters DescriptorType; + using DescriptorType = Parameters; const Parameters& GetParameters() const { return m_Param; } + /// Helper to serialize the layer parameters to string + /// (currently used in DotSerializer and company) + void SerializeLayerParameters(ParameterStringifyFunction & fn) const + { + StringifyLayerParameters::Serialize(fn, m_Param); + } + protected: LayerWithParameters(unsigned int numInputSlots, unsigned int numOutputSlots, diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp index 4ee68b3..77390cb 100644 --- a/src/armnn/Network.cpp +++ b/src/armnn/Network.cpp @@ -58,6 +58,11 @@ Status OptimizedNetwork::PrintGraph() return Status::Success; } +Status OptimizedNetwork::SerializeToDot(std::ostream& stream) const +{ + return m_Graph->SerializeToDot(stream); +} + IOptimizedNetworkPtr Optimize(const INetwork& inNetwork, const DeviceSpec& deviceSpec) { const Network& network = *boost::polymorphic_downcast(&inNetwork); @@ -65,7 +70,7 @@ IOptimizedNetworkPtr Optimize(const INetwork& inNetwork, const DeviceSpec& devic OptimizedNetwork* optNet = new OptimizedNetwork(std::move(graph)); - Optimizer::Get().Optimize(optNet->GetGraph()); + Optimizer::Optimize(optNet->GetGraph()); // Infer the tensor infos for all output slots. Throws an exception on failure. 
optNet->GetGraph().InferTensorInfos(); diff --git a/src/armnn/Network.hpp b/src/armnn/Network.hpp index de0c1ec..4eb67b1 100644 --- a/src/armnn/Network.hpp +++ b/src/armnn/Network.hpp @@ -135,6 +135,7 @@ public: ~OptimizedNetwork(); Status PrintGraph() override; + Status SerializeToDot(std::ostream& stream) const override; Graph& GetGraph() { return *m_Graph; } diff --git a/src/armnn/Optimizer.cpp b/src/armnn/Optimizer.cpp index 85b9f28..9b76c7f 100644 --- a/src/armnn/Optimizer.cpp +++ b/src/armnn/Optimizer.cpp @@ -8,7 +8,7 @@ namespace armnn { -const Optimizer& Optimizer::Get() +Optimizer::Optimizer() { // Add optimizations here static optimizations::SquashEqualPermuteSiblings squashEqualPermuteSiblings; @@ -19,28 +19,26 @@ const Optimizer& Optimizer::Get() static optimizations::OptimizeConsecutiveReshapes optimizeConsecutiveReshapes; // Set optimizations in desired order - static const Optimizer optimizer({ - &squashEqualPermuteSiblings, - &squashEqualReshapeSiblings, - &optimizeInversePermutes, - &movePermuteUp, - &permuteAsReshape, - &optimizeConsecutiveReshapes, - }); - - return optimizer; + m_Optimizations = {&squashEqualPermuteSiblings, + &squashEqualReshapeSiblings, + &optimizeInversePermutes, + &movePermuteUp, + &permuteAsReshape, + &optimizeConsecutiveReshapes, + }; } -void Optimizer::Optimize(Graph& graph) const +void Optimizer::Optimize(Graph& graph) { + Optimizer optimizer; auto it = graph.TopologicalSort().end(); // Call TopologicalSort() in every iteration to re-order the list in case layers where added/removed. 
while (it != graph.TopologicalSort().begin()) { --it; - for (auto&& optimization : m_Optimizations) + for (auto&& optimization : optimizer.m_Optimizations) { - optimization->Run(graph, it); + optimization->Run(graph, **it); if ((*it)->IsOutputUnconnected()) { diff --git a/src/armnn/Optimizer.hpp b/src/armnn/Optimizer.hpp index 262f264..1f5ed02 100644 --- a/src/armnn/Optimizer.hpp +++ b/src/armnn/Optimizer.hpp @@ -15,14 +15,13 @@ class Optimization; class Optimizer { public: - static const Optimizer& Get(); - void Optimize(Graph& graph) const; + static void Optimize(Graph& graph); private: ~Optimizer() = default; - Optimizer(std::initializer_list optimizations) : m_Optimizations(optimizations) {} + Optimizer(); std::vector m_Optimizations; }; diff --git a/src/armnn/Runtime.cpp b/src/armnn/Runtime.cpp index ea6d19b..e0d6a9a 100644 --- a/src/armnn/Runtime.cpp +++ b/src/armnn/Runtime.cpp @@ -9,6 +9,7 @@ #ifdef ARMCOMPUTECL_ENABLED #include #include +#include #endif #include @@ -58,18 +59,26 @@ Status Runtime::LoadNetwork(NetworkId& networkIdOut, IOptimizedNetworkPtr inNetw m_LoadedNetworks[networkIdOut] = std::move(loadedNetwork); return Status::Success; - } Status Runtime::UnloadNetwork(NetworkId networkId) { +#ifdef ARMCOMPUTECL_ENABLED + if (arm_compute::CLScheduler::get().context()() != NULL) + { + arm_compute::CLScheduler::get().sync(); + } +#endif if (m_LoadedNetworks.erase(networkId) == 0) { BOOST_LOG_TRIVIAL(warning) << "WARNING: Runtime::UnloadNetwork(): " << networkId << " not found!"; return Status::Failure; } #ifdef ARMCOMPUTECL_ENABLED - arm_compute::CLKernelLibrary::get().clear_programs_cache(); + if (arm_compute::CLScheduler::get().context()() != NULL && m_LoadedNetworks.empty()) + { + m_WorkloadFactories.m_GpuAcc.get()->LoadOpenClRuntime(); + } #endif BOOST_LOG_TRIVIAL(debug) << "Runtime::UnloadNetwork(): Unloaded network with ID: " << networkId; return Status::Success; @@ -87,11 +96,24 @@ Runtime::Runtime(const CreationOptions& options) 
m_WorkloadFactories.m_CpuRef = make_shared( options.m_DefaultComputeDevice == Compute::CpuRef ? true : options.m_UseCpuRefAsFallback); m_WorkloadFactories.m_CpuAcc = make_shared(); - m_WorkloadFactories.m_GpuAcc = make_shared(); + m_WorkloadFactories.m_GpuAcc = make_shared(options.m_ClTunedParameters); if (options.m_DefaultComputeDevice == Compute::GpuAcc) { - m_WorkloadFactories.m_GpuAcc.get()->LoadOpenClRuntime(options.m_ClTunedParameters); + m_WorkloadFactories.m_GpuAcc.get()->LoadOpenClRuntime(); + } +} + +Runtime::~Runtime() +{ + std::vector networkIDs; + std::transform(m_LoadedNetworks.begin(), m_LoadedNetworks.end(), + std::back_inserter(networkIDs), + [](const auto &pair) { return pair.first; }); + + for (auto networkID : networkIDs) + { + UnloadNetwork(networkID); } } diff --git a/src/armnn/Runtime.hpp b/src/armnn/Runtime.hpp index d3f3a57..86fd48d 100644 --- a/src/armnn/Runtime.hpp +++ b/src/armnn/Runtime.hpp @@ -56,6 +56,8 @@ public: /// it cannot be setup for some reason. Runtime(const CreationOptions& options); + ~Runtime(); + private: friend void RuntimeLoadedNetworksReserve(armnn::Runtime* runtime); // see RuntimeTests.cpp diff --git a/src/armnn/SerializeLayerParameters.cpp b/src/armnn/SerializeLayerParameters.cpp new file mode 100644 index 0000000..e8c2bba --- /dev/null +++ b/src/armnn/SerializeLayerParameters.cpp @@ -0,0 +1,156 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. 
+// +#include "SerializeLayerParameters.hpp" +#include +#include +#include +#include + +namespace armnn +{ + +void +StringifyLayerParameters::Serialize(ParameterStringifyFunction & fn, + const PermuteDescriptor & desc) +{ + std::stringstream ss; + ss << "["; + bool addComma = false; + for (auto it=desc.m_DimMappings.begin(); it!= desc.m_DimMappings.end(); ++it) + { + if (addComma) + { + ss << ","; + } + ss << *it; + addComma = true; + } + ss << "]"; + + fn("DimMappings",ss.str()); +} + +void +StringifyLayerParameters::Serialize(ParameterStringifyFunction & fn, + const ReshapeDescriptor & desc) +{ + std::stringstream ss; + ss << "["; + bool addComma = false; + for (unsigned int i=0; i::Serialize(ParameterStringifyFunction & fn, + const ActivationDescriptor & desc) +{ + fn("Function",GetActivationFunctionAsCString(desc.m_Function)); + fn("A",std::to_string(desc.m_A)); + fn("B",std::to_string(desc.m_B)); +} + +void +StringifyLayerParameters::Serialize(ParameterStringifyFunction & fn, + const Convolution2dDescriptor & desc) +{ + { + std::stringstream ss; + ss << "(" << desc.m_PadTop << "," << desc.m_PadLeft + << "," << desc.m_PadBottom << "," << desc.m_PadRight << ")"; + fn("Padding(T,L,B,R)",ss.str()); + } + + { + std::stringstream ss; + ss << "(" << desc.m_StrideX << "," << desc.m_StrideY << ")"; + fn("Stride(X,Y)", ss.str()); + } + + fn("BiasEnabled",(desc.m_BiasEnabled?"true":"false")); +} + +void +StringifyLayerParameters::Serialize(ParameterStringifyFunction & fn, + const BatchNormalizationDescriptor & desc) +{ + fn("Eps",std::to_string(desc.m_Eps)); +} + +void +StringifyLayerParameters::Serialize(ParameterStringifyFunction & fn, + const DepthwiseConvolution2dDescriptor & desc) +{ + { + std::stringstream ss; + ss << "(" << desc.m_PadTop << "," << desc.m_PadLeft + << "," << desc.m_PadBottom << "," << desc.m_PadRight << ")"; + fn("Padding(T,L,B,R)",ss.str()); + } + + { + std::stringstream ss; + ss << "(" << desc.m_StrideX << "," << desc.m_StrideY << ")"; + 
fn("Stride(X,Y)", ss.str()); + } + + fn("BiasEnabled",(desc.m_BiasEnabled?"true":"false")); +} + +void +StringifyLayerParameters::Serialize(ParameterStringifyFunction & fn, + const Pooling2dDescriptor & desc) +{ + fn("Type", GetPoolingAlgorithmAsCString(desc.m_PoolType)); + { + std::stringstream ss; + ss << "(" << desc.m_PadTop << "," << desc.m_PadLeft + << "," << desc.m_PadBottom << "," << desc.m_PadRight << ")"; + fn("Padding(T,L,B,R)",ss.str()); + } + + { + std::stringstream ss; + ss << "(" << desc.m_PoolWidth << "," << desc.m_PoolHeight << ")"; + fn("(Width,Height)",ss.str()); + } + + { + std::stringstream ss; + ss << "(" << desc.m_StrideX << "," << desc.m_StrideY << ")"; + fn("Stride(X,Y)", ss.str()); + } + + fn("OutputShapeRounding", GetOutputShapeRoundingAsCString(desc.m_OutputShapeRounding)); + fn("PaddingMethod", GetPaddingMethodAsCString(desc.m_PaddingMethod)); +} + +void +StringifyLayerParameters::Serialize(ParameterStringifyFunction & fn, + const SoftmaxDescriptor & desc) +{ + fn("Beta", std::to_string(desc.m_Beta)); +} + +void +StringifyLayerParameters::Serialize(ParameterStringifyFunction & fn, + const FullyConnectedDescriptor & desc) +{ + fn("BiasEnabled", (desc.m_BiasEnabled?"true":"false")); + fn("TransposeWeightMatrix", (desc.m_TransposeWeightMatrix?"true":"false")); +} + + +} diff --git a/src/armnn/SerializeLayerParameters.hpp b/src/armnn/SerializeLayerParameters.hpp new file mode 100644 index 0000000..b008160 --- /dev/null +++ b/src/armnn/SerializeLayerParameters.hpp @@ -0,0 +1,73 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// +#pragma once + +#include +#include +#include + +namespace armnn +{ + +using ParameterStringifyFunction = std::function; + +/// +/// StringifyLayerParameters allows serializing layer parameters to string. 
+/// The default implementation is a no-op because this operation is considered +/// non-vital for ArmNN and thus we allow adding new layer parameters without +/// supplying the corresponding stringify functionality. +/// +template +struct StringifyLayerParameters +{ + static void Serialize(ParameterStringifyFunction &, const LayerParameter &) {} +}; + +template <> struct StringifyLayerParameters +{ + static void Serialize(ParameterStringifyFunction & fn, const PermuteDescriptor & desc); +}; + +template <> struct StringifyLayerParameters +{ + static void Serialize(ParameterStringifyFunction & fn, const ReshapeDescriptor & desc); +}; + +template <> struct StringifyLayerParameters +{ + static void Serialize(ParameterStringifyFunction & fn, const ActivationDescriptor & desc); +}; + +template <> struct StringifyLayerParameters +{ + static void Serialize(ParameterStringifyFunction & fn, const Convolution2dDescriptor & desc); +}; + +template <> struct StringifyLayerParameters +{ + static void Serialize(ParameterStringifyFunction & fn, const BatchNormalizationDescriptor & desc); +}; + +template <> struct StringifyLayerParameters +{ + static void Serialize(ParameterStringifyFunction & fn, const DepthwiseConvolution2dDescriptor & desc); +}; + +template <> struct StringifyLayerParameters +{ + static void Serialize(ParameterStringifyFunction & fn, const Pooling2dDescriptor & desc); +}; + +template <> struct StringifyLayerParameters +{ + static void Serialize(ParameterStringifyFunction & fn, const SoftmaxDescriptor & desc); +}; + +template <> struct StringifyLayerParameters +{ + static void Serialize(ParameterStringifyFunction & fn, const FullyConnectedDescriptor & desc); +}; + +} \ No newline at end of file diff --git a/src/armnn/backends/ArmComputeTensorUtils.cpp b/src/armnn/backends/ArmComputeTensorUtils.cpp index 9f21c41..f88ed2b 100644 --- a/src/armnn/backends/ArmComputeTensorUtils.cpp +++ b/src/armnn/backends/ArmComputeTensorUtils.cpp @@ -78,6 +78,7 @@ 
arm_compute::PoolingLayerInfo BuildArmComputePoolingLayerInfo(const Pooling2dDes using arm_compute::DimensionRoundingType; using arm_compute::PadStrideInfo; using arm_compute::PoolingLayerInfo; + using arm_compute::Size2D; // Resolve ARM Compute layer parameters const PoolingType poolingType = ConvertPoolingAlgorithmToAclPoolingType(descriptor.m_PoolType); @@ -94,7 +95,9 @@ arm_compute::PoolingLayerInfo BuildArmComputePoolingLayerInfo(const Pooling2dDes const bool excludePadding = (descriptor.m_PaddingMethod == PaddingMethod::Exclude); - return arm_compute::PoolingLayerInfo(poolingType, descriptor.m_PoolWidth, padStrideInfo, excludePadding); + const Size2D poolSize(descriptor.m_PoolWidth, descriptor.m_PoolHeight); + + return arm_compute::PoolingLayerInfo(poolingType, poolSize, padStrideInfo, excludePadding); } arm_compute::NormalizationLayerInfo BuildArmComputeNormalizationLayerInfo(const NormalizationDescriptor& descriptor) @@ -114,7 +117,7 @@ arm_compute::PermutationVector BuildArmComputePermutationVector(const armnn::Per arm_compute::PermutationVector aclPerm; unsigned int start = 0; - while ((start == perm[start]) && (start < perm.GetSize())) + while ((start < perm.GetSize()) && (start == perm[start])) { ++start; } diff --git a/src/armnn/backends/ClWorkloadFactory.cpp b/src/armnn/backends/ClWorkloadFactory.cpp index 4e565a0..6af657b 100644 --- a/src/armnn/backends/ClWorkloadFactory.cpp +++ b/src/armnn/backends/ClWorkloadFactory.cpp @@ -35,24 +35,62 @@ bool ClWorkloadFactory::IsLayerSupported(const Layer& layer, DataType dataType, #ifdef ARMCOMPUTECL_ENABLED -void ClWorkloadFactory::LoadOpenClRuntime(IClTunedParameters* clTunedParameters) +ClWorkloadFactory::ClWorkloadFactory(IClTunedParameters* clTunedParameters): + m_clTunedParameters(boost::polymorphic_downcast(clTunedParameters)) { - ClTunedParameters* clTunedParametersImpl = boost::polymorphic_downcast(clTunedParameters); + try + { + std::vector platforms; + cl::Platform::get(&platforms); + + // Select 
default platform as the first element + cl::Platform::setDefault(platforms[0]); + + std::vector devices; + platforms[0].getDevices(CL_DEVICE_TYPE_GPU, &devices); + + // Select default device as the first element + cl::Device::setDefault(devices[0]); + } + catch (const cl::Error& clError) + { + throw ClRuntimeUnavailableException(boost::str(boost::format( + "Could not initialize the CL runtime. Error description: %1%. CL error code: %2%" + ) % clError.what() % clError.err())); + } + + // Remove the use of global CL context + cl::Context::setDefault(cl::Context{}); + BOOST_ASSERT(cl::Context::getDefault()() == NULL); - cl::Device device; + // Remove the use of global CL command queue + cl::CommandQueue::setDefault(cl::CommandQueue{}); + BOOST_ASSERT(cl::CommandQueue::getDefault()() == NULL); +} + +ClWorkloadFactory::~ClWorkloadFactory() +{ +} + +void ClWorkloadFactory::LoadOpenClRuntime() +{ + cl::Device device = cl::Device::getDefault(); cl::Context context; cl::CommandQueue commandQueue; try { - device = cl::Device::getDefault(); - context = cl::Context::getDefault(); + arm_compute::CLKernelLibrary::get().clear_programs_cache(); + arm_compute::CLScheduler::get().init(context, commandQueue, device); + arm_compute::CLKernelLibrary::get().init(".", context, device); + + context = cl::Context(device); bool enableProfiling = false; #if ARMNN_PROFILING_ENABLED enableProfiling = true; #endif - if (clTunedParametersImpl && clTunedParametersImpl->m_Mode == IClTunedParameters::Mode::UpdateTunedParameters) + if (m_clTunedParameters && m_clTunedParameters->m_Mode == IClTunedParameters::Mode::UpdateTunedParameters) { enableProfiling = true; // Needed for the CLTuner to work. 
} @@ -65,7 +103,7 @@ void ClWorkloadFactory::LoadOpenClRuntime(IClTunedParameters* clTunedParameters) else { // Use default queue - commandQueue = cl::CommandQueue::getDefault(); + commandQueue = cl::CommandQueue(context, device); } } catch (const cl::Error& clError) @@ -79,9 +117,9 @@ void ClWorkloadFactory::LoadOpenClRuntime(IClTunedParameters* clTunedParameters) arm_compute::CLKernelLibrary::get().init(".", context, device); arm_compute::ICLTuner* tuner = nullptr; - if (clTunedParameters) + if (m_clTunedParameters) { - tuner = &clTunedParametersImpl->m_Tuner; + tuner = &m_clTunedParameters->m_Tuner; } arm_compute::CLScheduler::get().init(context, commandQueue, device, tuner); } @@ -266,7 +304,16 @@ std::unique_ptr ClWorkloadFactory::CreateFloor(const FloorQueueDescri #else // #if ARMCOMPUTECL_ENABLED -void ClWorkloadFactory::LoadOpenClRuntime(IClTunedParameters* clTunedParameters) +ClWorkloadFactory::ClWorkloadFactory(IClTunedParameters* clTunedParameters) +{ + // No CL support +} + +ClWorkloadFactory::~ClWorkloadFactory() +{ +} + +void ClWorkloadFactory::LoadOpenClRuntime() { // No CL support } diff --git a/src/armnn/backends/ClWorkloadFactory.hpp b/src/armnn/backends/ClWorkloadFactory.hpp index 2477e23..e1e66c0 100644 --- a/src/armnn/backends/ClWorkloadFactory.hpp +++ b/src/armnn/backends/ClWorkloadFactory.hpp @@ -23,18 +23,22 @@ namespace armnn { class IClTunedParameters; +class ClTunedParameters; // ARM Compute OpenCL workload factory class ClWorkloadFactory : public IWorkloadFactory { public: - virtual ~ClWorkloadFactory(){}; + + ClWorkloadFactory(IClTunedParameters* clTunedParameters = nullptr); + + virtual ~ClWorkloadFactory(); virtual Compute GetCompute() const override { return Compute::GpuAcc; } static bool IsLayerSupported(const Layer& layer, DataType dataType, std::string& outReasonIfUnsupported); - void LoadOpenClRuntime(IClTunedParameters* clTunedParameters = nullptr); + void LoadOpenClRuntime(); virtual bool SupportsSubTensors() const override { 
return true; } @@ -109,6 +113,9 @@ public: virtual std::unique_ptr CreateFloor(const FloorQueueDescriptor& descriptor, const WorkloadInfo& info) const override; + +private: + ClTunedParameters* m_clTunedParameters; }; class ClTunedParameters : public IClTunedParameters diff --git a/src/armnn/backends/NeonLayerSupport.cpp b/src/armnn/backends/NeonLayerSupport.cpp index 382b15e..d8a3366 100644 --- a/src/armnn/backends/NeonLayerSupport.cpp +++ b/src/armnn/backends/NeonLayerSupport.cpp @@ -71,6 +71,22 @@ bool IsNeonDirectConvolutionPreferred(const TensorInfo& weightInfo, const Convol return preferDirectConvolution; } +bool IsNeonMultiplicationParamsSupported(std::string* reasonIfUnsupported, + const TensorInfo& info0, + const TensorInfo& info1) +{ + if (info0.GetShape() == info1.GetShape()) + { + return true; + } + + if (reasonIfUnsupported) + { + *reasonIfUnsupported = "Multiplication on Neon does not support implicit broadcast."; + } + return false; +} + bool IsNeonNormalizationDescParamsSupported(std::string* reasonIfUnsupported, const NormalizationDescriptor& parameters) { if (parameters.m_NormMethodType != NormalizationAlgorithmMethod::LocalBrightness) @@ -233,7 +249,7 @@ bool IsConvolution2dSupportedNeon(const TensorInfo& input, return IsSupportedForDataTypeNeon(reasonIfUnsupported, input.GetDataType(), &TrueFunc<>, - &FalseFuncU8<>); + &TrueFunc<>); } bool IsDepthwiseConvolutionSupportedNeon(const TensorInfo& input, @@ -293,11 +309,13 @@ bool IsMultiplicationSupportedNeon(const TensorInfo& input0, const TensorInfo& input1, std::string* reasonIfUnsupported) { - ignore_unused(input1); return IsSupportedForDataTypeNeon(reasonIfUnsupported, input0.GetDataType(), - &TrueFunc<>, - &FalseFuncU8<>); + &IsNeonMultiplicationParamsSupported, + &FalseFuncU8, + input0, + input1 + ); } bool IsNormalizationSupportedNeon(const TensorInfo& input, diff --git a/src/armnn/backends/NeonWorkloadFactory.cpp b/src/armnn/backends/NeonWorkloadFactory.cpp index 3842841..0f65a3d 100644 --- 
a/src/armnn/backends/NeonWorkloadFactory.cpp +++ b/src/armnn/backends/NeonWorkloadFactory.cpp @@ -112,7 +112,7 @@ std::unique_ptr NeonWorkloadFactory::CreatePooling2d(const Poo std::unique_ptr NeonWorkloadFactory::CreateConvolution2d( const Convolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const { - return MakeWorkload(descriptor, info); + return MakeWorkload(descriptor, info); } std::unique_ptr NeonWorkloadFactory::CreateDepthwiseConvolution2d( diff --git a/src/armnn/backends/NeonWorkloads.hpp b/src/armnn/backends/NeonWorkloads.hpp index 7e9e885..83a3e9f 100644 --- a/src/armnn/backends/NeonWorkloads.hpp +++ b/src/armnn/backends/NeonWorkloads.hpp @@ -13,7 +13,9 @@ #include "backends/NeonWorkloads/NeonBatchNormalizationFloat32Workload.hpp" #include "backends/NeonWorkloads/NeonConstantFloat32Workload.hpp" #include "backends/NeonWorkloads/NeonConstantUint8Workload.hpp" +#include "backends/NeonWorkloads/NeonConvolution2dBaseWorkload.hpp" #include "backends/NeonWorkloads/NeonConvolution2dFloat32Workload.hpp" +#include "backends/NeonWorkloads/NeonConvolution2dUint8Workload.hpp" #include "backends/NeonWorkloads/NeonDepthwiseConvolutionFloat32Workload.hpp" #include "backends/NeonWorkloads/NeonDepthwiseConvolutionUint8Workload.hpp" #include "backends/NeonWorkloads/NeonFloorFloat32Workload.hpp" diff --git a/src/armnn/backends/NeonWorkloads/NeonConvolution2dBaseWorkload.cpp b/src/armnn/backends/NeonWorkloads/NeonConvolution2dBaseWorkload.cpp index 5099965..10c96d8 100644 --- a/src/armnn/backends/NeonWorkloads/NeonConvolution2dBaseWorkload.cpp +++ b/src/armnn/backends/NeonWorkloads/NeonConvolution2dBaseWorkload.cpp @@ -73,10 +73,6 @@ NeonConvolution2dBaseWorkload::NeonConvolution2dBaseWorkload(const Con using Type = ResolveType; InitialiseArmComputeTensorData(m_KernelTensor, m_Data.m_Weight->template GetConstTensor()); - if (m_Data.m_Parameters.m_BiasEnabled) - { - InitialiseArmComputeTensorData(m_BiasTensor, m_Data.m_Bias->template GetConstTensor()); - } } 
// Generate known implementations for linker diff --git a/src/armnn/backends/NeonWorkloads/NeonConvolution2dBaseWorkload.hpp b/src/armnn/backends/NeonWorkloads/NeonConvolution2dBaseWorkload.hpp index 3774051..98d075a 100644 --- a/src/armnn/backends/NeonWorkloads/NeonConvolution2dBaseWorkload.hpp +++ b/src/armnn/backends/NeonWorkloads/NeonConvolution2dBaseWorkload.hpp @@ -3,6 +3,8 @@ // See LICENSE file in the project root for full license information. // +#pragma once + #include #include diff --git a/src/armnn/backends/NeonWorkloads/NeonConvolution2dFloat32Workload.cpp b/src/armnn/backends/NeonWorkloads/NeonConvolution2dFloat32Workload.cpp index b4650ac..a8c5c63 100644 --- a/src/armnn/backends/NeonWorkloads/NeonConvolution2dFloat32Workload.cpp +++ b/src/armnn/backends/NeonWorkloads/NeonConvolution2dFloat32Workload.cpp @@ -15,7 +15,12 @@ using namespace armcomputetensorutils; NeonConvolution2dFloat32Workload::NeonConvolution2dFloat32Workload(const Convolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) : NeonConvolution2dBaseWorkload(descriptor, info) -{} +{ + if (m_Data.m_Parameters.m_BiasEnabled) + { + InitialiseArmComputeTensorData(m_BiasTensor, m_Data.m_Bias->template GetConstTensor()); + } +} void NeonConvolution2dFloat32Workload::Execute() const diff --git a/src/armnn/backends/NeonWorkloads/NeonConvolution2dUint8Workload.cpp b/src/armnn/backends/NeonWorkloads/NeonConvolution2dUint8Workload.cpp new file mode 100644 index 0000000..ae20522 --- /dev/null +++ b/src/armnn/backends/NeonWorkloads/NeonConvolution2dUint8Workload.cpp @@ -0,0 +1,33 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. 
+// + +#include "NeonConvolution2dUint8Workload.hpp" + + +namespace armnn +{ +NeonConvolution2dUint8Workload::NeonConvolution2dUint8Workload(const Convolution2dQueueDescriptor& descriptor, + const WorkloadInfo& info) + : NeonConvolution2dBaseWorkload(descriptor, info) +{ + if (m_Data.m_Parameters.m_BiasEnabled) + { + InitialiseArmComputeTensorData(m_BiasTensor, m_Data.m_Bias->template GetConstTensor()); + } +} + + +void NeonConvolution2dUint8Workload::Execute() const +{ + ARMNN_SCOPED_PROFILING_EVENT(Compute::CpuAcc, NeonConvolution2dUint8Workload_Execute); + m_ConvolutionLayer->run(); +} + +void NeonConvolution2dUint8Workload::ValidateData() const +{ + m_Data.ValidateInputsOutputs("NeonConvolution2dUint8Workload", 1, 1); +} + +} //namespace armnn \ No newline at end of file diff --git a/src/armnn/backends/NeonWorkloads/NeonConvolution2dUint8Workload.hpp b/src/armnn/backends/NeonWorkloads/NeonConvolution2dUint8Workload.hpp new file mode 100644 index 0000000..319d574 --- /dev/null +++ b/src/armnn/backends/NeonWorkloads/NeonConvolution2dUint8Workload.hpp @@ -0,0 +1,27 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. 
+// + +#pragma once + +#include "NeonConvolution2dBaseWorkload.hpp" + +namespace armnn +{ + +class NeonConvolution2dUint8Workload : public NeonConvolution2dBaseWorkload +{ +public: + NeonConvolution2dUint8Workload(const Convolution2dQueueDescriptor& descriptor, const WorkloadInfo& info); + + virtual void ValidateData() const override; + virtual void Execute() const override; +private: +}; + +} //namespace armnnn + + + + diff --git a/src/armnn/backends/RefWorkloads/Addition.cpp b/src/armnn/backends/RefWorkloads/Addition.cpp index c26f82e..6d53a70 100644 --- a/src/armnn/backends/RefWorkloads/Addition.cpp +++ b/src/armnn/backends/RefWorkloads/Addition.cpp @@ -8,9 +8,6 @@ #include -namespace armnn -{ - namespace { @@ -24,6 +21,9 @@ void ElementwiseAddition(unsigned int numElements, const float* inData0, const f } // namespace +namespace armnn +{ + void Addition(const TensorShape& inShape0, const TensorShape& inShape1, const TensorShape& outShape, diff --git a/src/armnn/backends/RefWorkloads/Merger.hpp b/src/armnn/backends/RefWorkloads/Merger.hpp index 9695e45..476ced7 100644 --- a/src/armnn/backends/RefWorkloads/Merger.hpp +++ b/src/armnn/backends/RefWorkloads/Merger.hpp @@ -39,6 +39,7 @@ void Merger(const MergerQueueDescriptor& data) //split view extents are defined by the size of (the corresponding) input tensor const TensorInfo& inputInfo = GetTensorInfo(data.m_Inputs[viewIdx]); + BOOST_ASSERT(inputInfo.GetNumDimensions() == outputInfo0.GetNumDimensions()); // check all dimensions to see if this element is inside the given input view bool insideView = true; diff --git a/src/armnn/backends/RefWorkloads/Multiplication.cpp b/src/armnn/backends/RefWorkloads/Multiplication.cpp index 7f558d8..47c0f1c 100644 --- a/src/armnn/backends/RefWorkloads/Multiplication.cpp +++ b/src/armnn/backends/RefWorkloads/Multiplication.cpp @@ -4,18 +4,48 @@ // #include "Multiplication.hpp" +#include "Broadcast.hpp" -namespace armnn +#include + +namespace { -void Multiplication(const float* 
in0, - const float* in1, - unsigned int numElements, - float* out) +void ElementwiseMultiplication(unsigned int numElements, + const float* inData0, + const float* inData1, + float* outData) { for (unsigned int i = 0; i < numElements; ++i) { - out[i] = in0[i] * in1[i]; + outData[i] = inData0[i] * inData1[i]; + } +} + +} // namespace + +namespace armnn +{ + +void Multiplication(const TensorShape& inShape0, + const TensorShape& inShape1, + const TensorShape& outShape, + const float* inData0, + const float* inData1, + float* outData) +{ + if (inShape0 == inShape1) + { + ElementwiseMultiplication(inShape0.GetNumElements(), inData0, inData1, outData); + } + else + { + BroadcastLoop(inShape0, inShape1, outShape).Unroll( + std::multiplies(), + 0, + inData0, + inData1, + outData); } } diff --git a/src/armnn/backends/RefWorkloads/Multiplication.hpp b/src/armnn/backends/RefWorkloads/Multiplication.hpp index d0b033e..54fcac5 100644 --- a/src/armnn/backends/RefWorkloads/Multiplication.hpp +++ b/src/armnn/backends/RefWorkloads/Multiplication.hpp @@ -5,12 +5,16 @@ #pragma once +#include + namespace armnn { -void Multiplication(const float* in0, - const float* in1, - unsigned int numElements, - float* out); +void Multiplication(const TensorShape& inShape0, + const TensorShape& inShape1, + const TensorShape& outShape, + const float* inData0, + const float* inData1, + float* outData); } //namespace armnn diff --git a/src/armnn/backends/RefWorkloads/Pooling2d.cpp b/src/armnn/backends/RefWorkloads/Pooling2d.cpp index 6d15d8a..a643e67 100644 --- a/src/armnn/backends/RefWorkloads/Pooling2d.cpp +++ b/src/armnn/backends/RefWorkloads/Pooling2d.cpp @@ -186,8 +186,8 @@ void Pooling2d(const float* in, // Clamp the pooling region inside the valid input area (which includes the padding). // This is necessary because the final pooling in a row may overlap beyond the padding. 
- hend = std::min(hend, heightInput + padRight); - wend = std::min(wend, widthInput + padBottom); + hend = std::min(hend, heightInput + padBottom); + wend = std::min(wend, widthInput + padRight); float result = defaultInitializer; float poolAreaSize = boost::numeric_cast((hend - hstart) * (wend - wstart)); diff --git a/src/armnn/backends/RefWorkloads/RefMultiplicationFloat32Workload.cpp b/src/armnn/backends/RefWorkloads/RefMultiplicationFloat32Workload.cpp index ed68b1f..d7c54d9 100644 --- a/src/armnn/backends/RefWorkloads/RefMultiplicationFloat32Workload.cpp +++ b/src/armnn/backends/RefWorkloads/RefMultiplicationFloat32Workload.cpp @@ -17,12 +17,15 @@ void RefMultiplicationFloat32Workload::Execute() const { ARMNN_SCOPED_PROFILING_EVENT(Compute::CpuRef, "RefMultiplicationFloat32Workload_Execute"); - const TensorInfo& inputInfo0 = GetTensorInfo(m_Data.m_Inputs[0]); + const TensorShape& inShape0 = GetTensorInfo(m_Data.m_Inputs[0]).GetShape(); + const TensorShape& inShape1 = GetTensorInfo(m_Data.m_Inputs[1]).GetShape(); + const TensorShape& outShape = GetTensorInfo(m_Data.m_Outputs[0]).GetShape(); float* outputData = GetOutputTensorDataFloat(0, m_Data); const float* inputData0 = GetInputTensorDataFloat(0, m_Data); const float* inputData1 = GetInputTensorDataFloat(1, m_Data); - Multiplication(inputData0, inputData1, inputInfo0.GetNumElements(), outputData); + + Multiplication(inShape0, inShape1, outShape, inputData0, inputData1, outputData); } } //namespace armnn diff --git a/src/armnn/backends/RefWorkloads/RefMultiplicationUint8Workload.cpp b/src/armnn/backends/RefWorkloads/RefMultiplicationUint8Workload.cpp index 2e6f0e6..d5c4afd 100644 --- a/src/armnn/backends/RefWorkloads/RefMultiplicationUint8Workload.cpp +++ b/src/armnn/backends/RefWorkloads/RefMultiplicationUint8Workload.cpp @@ -27,10 +27,9 @@ void RefMultiplicationUint8Workload::Execute() const auto dequant1 = Dequantize(GetInputTensorDataU8(1, m_Data), inputInfo1); std::vector 
results(outputInfo.GetNumElements()); - Multiplication(dequant0.data(), - dequant1.data(), - inputInfo0.GetNumElements(), - results.data()); + Multiplication( + inputInfo0.GetShape(), inputInfo1.GetShape(), outputInfo.GetShape(), + dequant0.data(), dequant1.data(),results.data()); Quantize(GetOutputTensorDataU8(0, m_Data), results.data(), outputInfo); } diff --git a/src/armnn/backends/RefWorkloads/Splitter.hpp b/src/armnn/backends/RefWorkloads/Splitter.hpp index 67f6c10..74c4cb4 100644 --- a/src/armnn/backends/RefWorkloads/Splitter.hpp +++ b/src/armnn/backends/RefWorkloads/Splitter.hpp @@ -41,6 +41,7 @@ void Splitter(const SplitterQueueDescriptor& data) //split view extents are defined by the size of (the corresponding) input tensor const TensorInfo& outputInfo = GetTensorInfo(data.m_Outputs[viewIdx]); + BOOST_ASSERT(outputInfo.GetNumDimensions() == inputInfo0.GetNumDimensions()); // check all dimensions to see if this element is inside the given input view bool insideView = true; diff --git a/src/armnn/backends/WorkloadData.cpp b/src/armnn/backends/WorkloadData.cpp index 96a3780..c951fc5 100644 --- a/src/armnn/backends/WorkloadData.cpp +++ b/src/armnn/backends/WorkloadData.cpp @@ -502,16 +502,13 @@ void MultiplicationQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) c { ValidateTwoInputs(workloadInfo, "MultiplicationQueueDescriptor"); ValidateSingleOutput(workloadInfo, "MultiplicationQueueDescriptor"); - ValidateTensorShapesMatch(workloadInfo.m_InputTensorInfos[0], - workloadInfo.m_InputTensorInfos[1], - "MultiplicationQueueDescriptor", - "first input", - "second input"); - ValidateTensorShapesMatch(workloadInfo.m_InputTensorInfos[0], - workloadInfo.m_OutputTensorInfos[0], - "MultiplicationQueueDescriptor", - "input", - "output"); + + ValidateBroadcastTensorShapesMatch(workloadInfo.m_InputTensorInfos[0], + workloadInfo.m_InputTensorInfos[1], + workloadInfo.m_OutputTensorInfos[0], + "MultiplicationQueueDescriptor", + "first input", + "second input"); } 
void BatchNormalizationQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) const diff --git a/src/armnn/backends/test/ArmComputeCl.cpp b/src/armnn/backends/test/ArmComputeCl.cpp index 5933ceb..c45a82d 100644 --- a/src/armnn/backends/test/ArmComputeCl.cpp +++ b/src/armnn/backends/test/ArmComputeCl.cpp @@ -103,7 +103,7 @@ ARMNN_AUTO_TEST_CASE(IgnorePaddingSimpleAveragePooling2d, IgnorePaddingSimpleAve ARMNN_AUTO_TEST_CASE(IgnorePaddingSimpleAveragePooling2dUint8, IgnorePaddingSimpleAveragePooling2dUint8Test) ARMNN_AUTO_TEST_CASE(IgnorePaddingSimpleAveragePooling2dNoPadding, IgnorePaddingSimpleAveragePooling2dNoPaddingTest) ARMNN_AUTO_TEST_CASE(IgnorePaddingSimpleAveragePooling2dNoPaddingUint8, - IgnorePaddingSimpleAveragePooling2dNoPaddingUint8Test) + IgnorePaddingSimpleAveragePooling2dNoPaddingUint8Test) ARMNN_AUTO_TEST_CASE(IgnorePaddingAveragePooling2dSize3, IgnorePaddingAveragePooling2dSize3Test) ARMNN_AUTO_TEST_CASE(IgnorePaddingAveragePooling2dSize3Uint8, IgnorePaddingAveragePooling2dSize3Uint8Test) @@ -114,6 +114,12 @@ ARMNN_AUTO_TEST_CASE(UNSUPPORTED_IgnorePaddingL2Pooling2dSize3Uint8, IgnorePaddi ARMNN_AUTO_TEST_CASE(SimpleAveragePooling2d, SimpleAveragePooling2dTest) ARMNN_AUTO_TEST_CASE(SimpleAveragePooling2dUint8, SimpleAveragePooling2dUint8Test) +ARMNN_AUTO_TEST_CASE(IgnorePaddingAveragePooling2dSize3x2Stride2x2, + IgnorePaddingAveragePooling2dSize3x2Stride2x2Test, + false) +ARMNN_AUTO_TEST_CASE(IgnorePaddingAveragePooling2dSize3x2Stride2x2NoPadding, + IgnorePaddingAveragePooling2dSize3x2Stride2x2Test, + true) ARMNN_AUTO_TEST_CASE(LargeTensorsAveragePooling2d, LargeTensorsAveragePooling2dTest) ARMNN_AUTO_TEST_CASE(LargeTensorsAveragePooling2dUint8, LargeTensorsAveragePooling2dUint8Test) @@ -136,6 +142,8 @@ ARMNN_AUTO_TEST_CASE(AddBroadcast1Element, AdditionBroadcast1ElementTest) // Mul ARMNN_AUTO_TEST_CASE(SimpleMultiplication, MultiplicationTest) +ARMNN_AUTO_TEST_CASE(MultiplicationBroadcast1Element, MultiplicationBroadcast1ElementTest) 
+ARMNN_AUTO_TEST_CASE(MultiplicationBroadcast1DVector, MultiplicationBroadcast1DVectorTest) // Batch Norm ARMNN_AUTO_TEST_CASE(BatchNorm, BatchNormTest) @@ -194,6 +202,9 @@ ARMNN_AUTO_TEST_CASE(SimpleReshapeUint8, SimpleReshapeUint8Test) // Permute ARMNN_AUTO_TEST_CASE(SimplePermuteFloat32, SimplePermuteFloat32Test) ARMNN_AUTO_TEST_CASE(SimplePermuteUint8, SimplePermuteUint8Test) +ARMNN_AUTO_TEST_CASE(PermuteFloat32ValueSet1, PermuteFloat32ValueSet1Test) +ARMNN_AUTO_TEST_CASE(PermuteFloat32ValueSet2, PermuteFloat32ValueSet2Test) +ARMNN_AUTO_TEST_CASE(PermuteFloat32ValueSet3, PermuteFloat32ValueSet3Test) // ============================================================================ // COMPARE tests diff --git a/src/armnn/backends/test/ArmComputeNeon.cpp b/src/armnn/backends/test/ArmComputeNeon.cpp index dd8a668..a81b7cd 100644 --- a/src/armnn/backends/test/ArmComputeNeon.cpp +++ b/src/armnn/backends/test/ArmComputeNeon.cpp @@ -141,6 +141,7 @@ ARMNN_AUTO_TEST_CASE(SimpleMaxPooling2dSize3x3Stride2x4, SimpleMaxPooling2dSize3 ARMNN_AUTO_TEST_CASE(SimpleMaxPooling2dSize3x3Stride2x4Uint8, SimpleMaxPooling2dSize3x3Stride2x4Uint8Test, true) ARMNN_AUTO_TEST_CASE(SimpleAveragePooling2d, SimpleAveragePooling2dTest) ARMNN_AUTO_TEST_CASE(SimpleAveragePooling2dUint8, SimpleAveragePooling2dUint8Test) + ARMNN_AUTO_TEST_CASE(LargeTensorsAveragePooling2d, LargeTensorsAveragePooling2dTest) ARMNN_AUTO_TEST_CASE(LargeTensorsAveragePooling2dUint8, LargeTensorsAveragePooling2dUint8Test) @@ -170,6 +171,11 @@ ARMNN_AUTO_TEST_CASE(IgnorePaddingSimpleAveragePooling2dNoPaddingUint8, IgnorePaddingSimpleAveragePooling2dNoPaddingUint8Test) ARMNN_AUTO_TEST_CASE(IgnorePaddingAveragePooling2dSize3, IgnorePaddingAveragePooling2dSize3Test) ARMNN_AUTO_TEST_CASE(IgnorePaddingAveragePooling2dSize3Uint8, IgnorePaddingAveragePooling2dSize3Uint8Test) +ARMNN_AUTO_TEST_CASE(IgnorePaddingAveragePooling2dSize3x2Stride2x2, + IgnorePaddingAveragePooling2dSize3x2Stride2x2Test, false) 
+ARMNN_AUTO_TEST_CASE(IgnorePaddingAveragePooling2dSize3x2Stride2x2NoPadding, + IgnorePaddingAveragePooling2dSize3x2Stride2x2Test, + true) ARMNN_AUTO_TEST_CASE(IgnorePaddingSimpleL2Pooling2d, IgnorePaddingSimpleL2Pooling2dTest) ARMNN_AUTO_TEST_CASE(UNSUPPORTED_IgnorePaddingSimpleL2Pooling2dUint8, IgnorePaddingSimpleL2Pooling2dUint8Test) @@ -281,6 +287,10 @@ ARMNN_AUTO_TEST_CASE(SimpleReshapeUint8, SimpleReshapeUint8Test) // Permute ARMNN_AUTO_TEST_CASE(SimplePermuteFloat32, SimplePermuteFloat32Test) ARMNN_AUTO_TEST_CASE(SimplePermuteUint8, SimplePermuteUint8Test) +ARMNN_AUTO_TEST_CASE(PermuteFloat32ValueSet1, PermuteFloat32ValueSet1Test) +ARMNN_AUTO_TEST_CASE(PermuteFloat32ValueSet2, PermuteFloat32ValueSet2Test) +ARMNN_AUTO_TEST_CASE(PermuteFloat32ValueSet3, PermuteFloat32ValueSet3Test) + // ============================================================================ // COMPARE tests diff --git a/src/armnn/backends/test/LayerTests.cpp b/src/armnn/backends/test/LayerTests.cpp index 76681f9..9eed2db 100644 --- a/src/armnn/backends/test/LayerTests.cpp +++ b/src/armnn/backends/test/LayerTests.cpp @@ -1005,31 +1005,22 @@ LayerTestResult CompareAdditionTest(armnn::IWorkloadFactory& workloadFa return ret; } -LayerTestResult MultiplicationTest(armnn::IWorkloadFactory& workloadFactory) -{ - const unsigned int width = 2; - const unsigned int height = 2; - const unsigned int channelCount = 2; - const unsigned int batchSize = 2; - - armnn::TensorInfo inputTensorInfo0; - armnn::TensorInfo inputTensorInfo1; - armnn::TensorInfo outputTensorInfo; - - constexpr unsigned int shape[] = { batchSize, channelCount, height, width }; - constexpr std::size_t dimensionCount = std::extent::value; - - inputTensorInfo0 = armnn::TensorInfo(dimensionCount, shape, armnn::DataType::Float32); - inputTensorInfo1 = armnn::TensorInfo(dimensionCount, shape, armnn::DataType::Float32); - outputTensorInfo = armnn::TensorInfo(dimensionCount, shape, armnn::DataType::Float32); - - auto input0 = 
MakeTensor(inputTensorInfo0, std::vector({ - 1, 1, 1, 1, 2, 2, 2, 2, - 3, 3, 3, 3, 4, 4, 4, 4 })); - - auto input1 = MakeTensor(inputTensorInfo1, std::vector({ - 2, 2, 2, 2, 3, 3, 3, 3, - 4, 4, 4, 4, 5, 5, 5, 5 })); +namespace { +LayerTestResult MultiplicationTestHelper(armnn::IWorkloadFactory& workloadFactory, + const unsigned int shape0[4], + const std::vector & values0, + const unsigned int shape1[4], + const std::vector & values1, + const unsigned int outShape[4], + const std::vector & outValues) +{ + const size_t dimensionCount = 4; + armnn::TensorInfo inputTensorInfo0{dimensionCount, shape0, armnn::DataType::Float32}; + armnn::TensorInfo inputTensorInfo1{dimensionCount, shape1, armnn::DataType::Float32}; + armnn::TensorInfo outputTensorInfo{dimensionCount, outShape, armnn::DataType::Float32}; + + auto input0 = MakeTensor(inputTensorInfo0, values0); + auto input1 = MakeTensor(inputTensorInfo1, values1); LayerTestResult ret(outputTensorInfo); @@ -1056,11 +1047,84 @@ LayerTestResult MultiplicationTest(armnn::IWorkloadFactory& workloadFac CopyDataFromITensorHandle(&ret.output[0][0][0][0], outputHandle.get()); - ret.outputExpected = MakeTensor(outputTensorInfo, std::vector({ + ret.outputExpected = MakeTensor(outputTensorInfo, outValues); + return ret; +} +} // anonymous namespace + + +LayerTestResult MultiplicationTest(armnn::IWorkloadFactory& workloadFactory) +{ + const unsigned int width = 2; + const unsigned int height = 2; + const unsigned int channelCount = 2; + const unsigned int batchSize = 2; + + unsigned int shape[] = { batchSize, channelCount, height, width }; + + std::vector input0({ + 1, 1, 1, 1, 2, 2, 2, 2, + 3, 3, 3, 3, 4, 4, 4, 4 }); + + std::vector input1({ + 2, 2, 2, 2, 3, 3, 3, 3, + 4, 4, 4, 4, 5, 5, 5, 5 }); + + std::vector output({ 2, 2, 2, 2, 6, 6, 6, 6, - 12, 12, 12, 12, 20, 20, 20, 20 })); + 12, 12, 12, 12, 20, 20, 20, 20 }); - return ret; + return MultiplicationTestHelper(workloadFactory, + shape, + input0, + shape, + input1, + shape, + 
output); +} + +LayerTestResult MultiplicationBroadcast1ElementTest(armnn::IWorkloadFactory& workloadFactory) +{ + unsigned int shape0[] = { 1, 2, 2, 2 }; + std::vector input0({ 1, 2, 3, 4, 5, 6, 7, 8}); + + unsigned int shape1[] = { 1, 1, 1, 1 }; + std::vector input1({ 2 }); + + std::vector output({ 2, 4, 6, 8, 10, 12, 14, 16}); + + return MultiplicationTestHelper(workloadFactory, + shape0, + input0, + shape1, + input1, + shape0, + output); +} + +LayerTestResult MultiplicationBroadcast1DVectorTest(armnn::IWorkloadFactory& workloadFactory) +{ + unsigned int shape0[] = { 1, 3, 3, 2 }; + std::vector input0({ + 1, 2, 3, 4, 5, 6, + 7, 8, 9, 10, 11, 12, + 13, 14, 15, 16, 17, 18}); + + unsigned int shape1[] = { 1, 1, 1, 2 }; + std::vector input1({ 1, 2 }); + + std::vector output({ + 1, 4, 3, 8, 5, 12, + 7, 16, 9, 20, 11, 24, + 13, 28, 15, 32, 17, 36}); + + return MultiplicationTestHelper(workloadFactory, + shape0, + input0, + shape1, + input1, + shape0, + output); } LayerTestResult CompareMultiplicationTest(armnn::IWorkloadFactory& workloadFactory, @@ -3253,69 +3317,59 @@ LayerTestResult AdditionUint8Test(armnn::IWorkloadFactory& workloadF return result; } -LayerTestResult MultiplicationUint8Test(armnn::IWorkloadFactory& workloadFactory) +namespace { - unsigned int batchSize = 1; - unsigned int channels = 2; - unsigned int height = 2; - unsigned int width = 3; +LayerTestResult MultiplicationUint8TestHelper(armnn::IWorkloadFactory& workloadFactory, + const unsigned int shape0[4], + const std::vector & values0, + float scale0, + int32_t offset0, + const unsigned int shape1[4], + const std::vector & values1, + float scale1, + int32_t offset1, + const unsigned int outShape[4], + const std::vector & outValues, + float outScale, + int32_t outOffset) +{ + armnn::TensorInfo inputTensorInfo0(4, shape0, armnn::DataType::QuantisedAsymm8); + armnn::TensorInfo inputTensorInfo1(4, shape1, armnn::DataType::QuantisedAsymm8); + armnn::TensorInfo outputTensorInfo(4, outShape, 
armnn::DataType::QuantisedAsymm8); - armnn::TensorInfo inputTensorInfo1, inputTensorInfo2; - armnn::TensorInfo outputTensorInfo; + inputTensorInfo0.SetQuantizationScale(scale0); + inputTensorInfo0.SetQuantizationOffset(offset0); - const unsigned int shape[] = { batchSize, channels, height, width }; - inputTensorInfo1 = armnn::TensorInfo(4, shape, armnn::DataType::QuantisedAsymm8); - inputTensorInfo1.SetQuantizationScale(4.0f); - inputTensorInfo1.SetQuantizationOffset(1); + inputTensorInfo1.SetQuantizationScale(scale1); + inputTensorInfo1.SetQuantizationOffset(offset1); - inputTensorInfo2 = armnn::TensorInfo(4, shape, armnn::DataType::QuantisedAsymm8); - inputTensorInfo2.SetQuantizationScale(3.0f); - inputTensorInfo2.SetQuantizationOffset(-2); + outputTensorInfo.SetQuantizationScale(outScale); + outputTensorInfo.SetQuantizationOffset(outOffset); - outputTensorInfo = armnn::TensorInfo(4, shape, armnn::DataType::QuantisedAsymm8); - outputTensorInfo.SetQuantizationScale(1366.255f); // Scale/offset chosen to have output values out of range - outputTensorInfo.SetQuantizationOffset(-5); + auto input0 = MakeTensor(inputTensorInfo0, values0); + auto input1 = MakeTensor(inputTensorInfo1, values1); - // See dequantized values to the right - auto input1 = MakeTensor(inputTensorInfo1, std::vector( - { - 62, 37, 3, 172, 13, 111, // 244, 144, 8, 684, 48, 440, - 188, 20, 73, 31, 23, 31 // 748, 76, 288, 120, 88, 120 - })); - - // See dequantized values to the right - auto input2 = MakeTensor(inputTensorInfo1, std::vector( - { - 126, 240, 252, 183, 121, 247, // 384, 726, 762, 555, 369, 747, - 48, 115, 151, 79, 78, 97 // 150, 351, 459, 243, 240, 297 - })); - - // See dequantized values to the right LayerTestResult result(outputTensorInfo); - result.outputExpected = MakeTensor(outputTensorInfo, std::vector( - { - 64, 72, 0, 255, 8, 236, // 93696, 104544, 6096(clamped), 379620(clamped), 17712, 328680, - 77, 15, 92, 16, 10, 21, // 112200, 26676, 132192, 29160, 21120, 35640 - })); + 
result.outputExpected = MakeTensor(outputTensorInfo, outValues); + std::unique_ptr inputHandle0 = workloadFactory.CreateTensorHandle(inputTensorInfo0); std::unique_ptr inputHandle1 = workloadFactory.CreateTensorHandle(inputTensorInfo1); - std::unique_ptr inputHandle2 = workloadFactory.CreateTensorHandle(inputTensorInfo2); std::unique_ptr outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo); armnn::MultiplicationQueueDescriptor data; armnn::WorkloadInfo info; - AddInputToWorkload(data, info, inputTensorInfo1, inputHandle1.get()); - AddInputToWorkload(data, info, inputTensorInfo2, inputHandle2.get()); + AddInputToWorkload(data, info, inputTensorInfo0, inputHandle0.get()); + AddInputToWorkload(data, info, inputTensorInfo1, inputHandle1.get()); AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get()); std::unique_ptr workload = workloadFactory.CreateMultiplication(data, info); + inputHandle0->Allocate(); inputHandle1->Allocate(); - inputHandle2->Allocate(); outputHandle->Allocate(); + CopyDataToITensorHandle(inputHandle0.get(), &input0[0][0][0][0]); CopyDataToITensorHandle(inputHandle1.get(), &input1[0][0][0][0]); - CopyDataToITensorHandle(inputHandle2.get(), &input2[0][0][0][0]); workload->Execute(); @@ -3323,6 +3377,113 @@ LayerTestResult MultiplicationUint8Test(armnn::IWorkloadFactory& wor return result; } +} // anonymous namespace + +LayerTestResult MultiplicationUint8Test(armnn::IWorkloadFactory& workloadFactory) +{ + unsigned int batchSize = 1; + unsigned int channels = 2; + unsigned int height = 2; + unsigned int width = 3; + const unsigned int shape[] = { batchSize, channels, height, width }; + + // See dequantized values to the right + std::vector input0({ + 62, 37, 3, 172, 13, 111, // 244, 144, 8, 684, 48, 440, + 188, 20, 73, 31, 23, 31 // 748, 76, 288, 120, 88, 120 + }); + + // See dequantized values to the right + std::vector input1({ + 126, 240, 252, 183, 121, 247, // 384, 726, 762, 555, 369, 747, + 48, 115, 151, 79, 78, 97 // 
150, 351, 459, 243, 240, 297 + }); + + // See dequantized values to the right + std::vector output( + { + 64, 72, 0, 255, 8, 236, // 93696, 104544, 6096(clamped), 379620(clamped), 17712, 328680, + 77, 15, 92, 16, 10, 21, // 112200, 26676, 132192, 29160, 21120, 35640 + }); + + return MultiplicationUint8TestHelper(workloadFactory, + shape, + input0, + 4.0f, + 1, + shape, + input1, + 3.0f, + -2, + shape, + output, + 1366.255f, // Scale/offset chosen to have output values out of range + -5); +} + +LayerTestResult MultiplicationBroadcast1ElementUint8Test(armnn::IWorkloadFactory& workloadFactory) +{ + const unsigned int shape0[] = { 1, 2, 2, 3 }; + const unsigned int shape1[] = { 1, 1, 1, 1 }; + + std::vector input0({ + 1, 2, 3, 4, 5, 6, + 7, 8, 9, 10, 11, 12 + }); + + std::vector input1({2}); + + std::vector output({ + 2, 4, 6, 8, 10, 12, + 14, 16, 18, 20, 22, 24 + }); + + return MultiplicationUint8TestHelper(workloadFactory, + shape0, + input0, + 1.0f, + 0, + shape1, + input1, + 1.0f, + 0, + shape0, + output, + 1.0f, + 0); +} + +LayerTestResult MultiplicationBroadcast1DVectorUint8Test(armnn::IWorkloadFactory& workloadFactory) +{ + const unsigned int shape0[] = { 1, 2, 2, 3 }; + const unsigned int shape1[] = { 1, 1, 1, 3 }; + + std::vector input0({ + 1, 2, 3, 4, 5, 6, + 7, 8, 9, 10, 11, 12 + }); + + std::vector input1({1, 2, 3}); + + std::vector output({ + 1, 4, 9, 4, 10, 18, + 7, 16, 27, 10, 22, 36 + }); + + return MultiplicationUint8TestHelper(workloadFactory, + shape0, + input0, + 1.0f, + 0, + shape1, + input1, + 1.0f, + 0, + shape0, + output, + 1.0f, + 0); +} LayerTestResult ResizeBilinearNopUint8Test(armnn::IWorkloadFactory& workloadFactory) { @@ -3702,6 +3863,12 @@ LayerTestResult SimpleAveragePooling2dUint8Test(armnn::IWorkloadFact return SimpleAveragePooling2dTestCommon(workloadFactory, 0.5, -1); } +LayerTestResult IgnorePaddingAveragePooling2dSize3x2Stride2x2Test(armnn::IWorkloadFactory& workloadFactory, + bool forceNoPadding) +{ + return 
IgnorePaddingAveragePooling2dSize3x2Stride2x2TestCommon(workloadFactory, forceNoPadding); +} + LayerTestResult LargeTensorsAveragePooling2dTest(armnn::IWorkloadFactory& workloadFactory) { return LargeTensorsAveragePooling2dTestCommon(workloadFactory); @@ -3882,3 +4049,18 @@ LayerTestResult SimplePermuteUint8Test(armnn::IWorkloadFactory& work { return SimplePermuteUint8TestCommon(workloadFactory); }; + +LayerTestResult PermuteFloat32ValueSet1Test(armnn::IWorkloadFactory& workloadFactory) +{ + return PermuteFloat32ValueSet1TestCommon(workloadFactory); +}; + +LayerTestResult PermuteFloat32ValueSet2Test(armnn::IWorkloadFactory& workloadFactory) +{ + return PermuteFloat32ValueSet2TestCommon(workloadFactory); +}; + +LayerTestResult PermuteFloat32ValueSet3Test(armnn::IWorkloadFactory& workloadFactory) +{ + return PermuteFloat32ValueSet3TestCommon(workloadFactory); +}; diff --git a/src/armnn/backends/test/LayerTests.hpp b/src/armnn/backends/test/LayerTests.hpp index fc0c9c7..36e73e4 100644 --- a/src/armnn/backends/test/LayerTests.hpp +++ b/src/armnn/backends/test/LayerTests.hpp @@ -82,6 +82,8 @@ LayerTestResult IgnorePaddingMaxPooling2dSize3Uint8Test(armnn::IWork LayerTestResult SimpleAveragePooling2dTest(armnn::IWorkloadFactory& workloadFactory); LayerTestResult SimpleAveragePooling2dUint8Test(armnn::IWorkloadFactory& workloadFactory); +LayerTestResult IgnorePaddingAveragePooling2dSize3x2Stride2x2Test(armnn::IWorkloadFactory& workloadFactory, + bool forceNoPadding); LayerTestResult IgnorePaddingSimpleAveragePooling2dTest(armnn::IWorkloadFactory& workloadFactory); LayerTestResult IgnorePaddingSimpleAveragePooling2dUint8Test(armnn::IWorkloadFactory& workloadFactory); LayerTestResult IgnorePaddingSimpleAveragePooling2dNoPaddingTest(armnn::IWorkloadFactory& workloadFactory); @@ -187,6 +189,8 @@ LayerTestResult CompareActivationTest(armnn::IWorkloadFactory& worklo unsigned int batchSize); LayerTestResult MultiplicationTest(armnn::IWorkloadFactory& workloadFactory); 
+LayerTestResult MultiplicationBroadcast1ElementTest(armnn::IWorkloadFactory& workloadFactory); +LayerTestResult MultiplicationBroadcast1DVectorTest(armnn::IWorkloadFactory& workloadFactory); LayerTestResult CompareMultiplicationTest(armnn::IWorkloadFactory& workloadFactory, armnn::IWorkloadFactory& refWorkloadFactory); @@ -260,6 +264,8 @@ LayerTestResult CompareSoftmaxUint8Test(armnn::IWorkloadFactory& wor float beta); LayerTestResult MultiplicationUint8Test(armnn::IWorkloadFactory& workloadFactory); +LayerTestResult MultiplicationBroadcast1ElementUint8Test(armnn::IWorkloadFactory& workloadFactory); +LayerTestResult MultiplicationBroadcast1DVectorUint8Test(armnn::IWorkloadFactory& workloadFactory); LayerTestResult SimpleConvolution2d3x5Uint8Test(armnn::IWorkloadFactory& workloadFactory, bool biasEnabled); @@ -303,3 +309,6 @@ LayerTestResult FullyConnectedLargeTest(armnn::IWorkloadFactory& workl LayerTestResult SimplePermuteFloat32Test(armnn::IWorkloadFactory& workloadFactory); LayerTestResult SimplePermuteUint8Test(armnn::IWorkloadFactory& workloadFactory); +LayerTestResult PermuteFloat32ValueSet1Test(armnn::IWorkloadFactory& workloadFactory); +LayerTestResult PermuteFloat32ValueSet2Test(armnn::IWorkloadFactory& workloadFactory); +LayerTestResult PermuteFloat32ValueSet3Test(armnn::IWorkloadFactory& workloadFactory); diff --git a/src/armnn/backends/test/PermuteTestImpl.hpp b/src/armnn/backends/test/PermuteTestImpl.hpp index 4eafa1a..4ecffed 100644 --- a/src/armnn/backends/test/PermuteTestImpl.hpp +++ b/src/armnn/backends/test/PermuteTestImpl.hpp @@ -119,3 +119,107 @@ LayerTestResult SimplePermuteUint8TestCommon(armnn::IWorkloadFactory return SimplePermuteTestImpl(workloadFactory, descriptor, inputTensorInfo, outputTensorInfo, input, outputExpected); } + +LayerTestResult +PermuteFloat32ValueSet1TestCommon(armnn::IWorkloadFactory& workloadFactory) +{ + armnn::TensorInfo inputTensorInfo; + armnn::TensorInfo outputTensorInfo; + + unsigned int inputShape[] = { 1, 2, 2, 
3 }; + unsigned int outputShape[] = { 1, 3, 2, 2 }; + + armnn::PermuteDescriptor descriptor; + descriptor.m_DimMappings = {0U, 2U, 3U, 1U}; + + inputTensorInfo = armnn::TensorInfo(4, inputShape, armnn::DataType::Float32); + outputTensorInfo = armnn::TensorInfo(4, outputShape, armnn::DataType::Float32); + + std::vector input = std::vector( + { + 1.0f, 2.0f, 3.0f, + 11.0f, 12.0f, 13.0f, + 21.0f, 22.0f, 23.0f, + 31.0f, 32.0f, 33.0f, + }); + + std::vector outputExpected = std::vector( + { + 1.0f, 11.0f, 21.0f, 31.0f, + 2.0f, 12.0f, 22.0f, 32.0f, + 3.0f, 13.0f, 23.0f, 33.0f, + }); + + return SimplePermuteTestImpl(workloadFactory, descriptor, inputTensorInfo, + outputTensorInfo, input, outputExpected); +} + +LayerTestResult +PermuteFloat32ValueSet2TestCommon(armnn::IWorkloadFactory& workloadFactory) +{ + armnn::TensorInfo inputTensorInfo; + armnn::TensorInfo outputTensorInfo; + + unsigned int inputShape[] = { 1, 3, 2, 2 }; + unsigned int outputShape[] = { 1, 2, 2, 3 }; + + armnn::PermuteDescriptor descriptor; + descriptor.m_DimMappings = {0U, 3U, 1U, 2U}; + + inputTensorInfo = armnn::TensorInfo(4, inputShape, armnn::DataType::Float32); + outputTensorInfo = armnn::TensorInfo(4, outputShape, armnn::DataType::Float32); + + std::vector input = std::vector( + { + 1.0f, 11.0f, 21.0f, 31.0f, + 2.0f, 12.0f, 22.0f, 32.0f, + 3.0f, 13.0f, 23.0f, 33.0f, + }); + + std::vector outputExpected = std::vector( + { + 1.0f, 2.0f, 3.0f, + 11.0f, 12.0f, 13.0f, + 21.0f, 22.0f, 23.0f, + 31.0f, 32.0f, 33.0f, + }); + + return SimplePermuteTestImpl(workloadFactory, descriptor, inputTensorInfo, + outputTensorInfo, input, outputExpected); +} + +LayerTestResult +PermuteFloat32ValueSet3TestCommon(armnn::IWorkloadFactory& workloadFactory) +{ + armnn::TensorInfo inputTensorInfo; + armnn::TensorInfo outputTensorInfo; + + unsigned int inputShape[] = { 1, 2, 3, 3 }; + unsigned int outputShape[] = { 1, 3, 2, 3 }; + + armnn::PermuteDescriptor descriptor; + descriptor.m_DimMappings = {0U, 2U, 3U, 1U}; + + 
inputTensorInfo = armnn::TensorInfo(4, inputShape, armnn::DataType::Float32); + outputTensorInfo = armnn::TensorInfo(4, outputShape, armnn::DataType::Float32); + + std::vector input = std::vector( + { + 1.0f, 2.0f, 3.0f, + 11.0f, 12.0f, 13.0f, + 21.0f, 22.0f, 23.0f, + 31.0f, 32.0f, 33.0f, + 41.0f, 42.0f, 43.0f, + 51.0f, 52.0f, 53.0f, + }); + + std::vector outputExpected = std::vector( + { + 1.0f, 11.0f, 21.0f, 31.0f, 41.0f, 51.0f, + 2.0f, 12.0f, 22.0f, 32.0f, 42.0f, 52.0f, + 3.0f, 13.0f, 23.0f, 33.0f, 43.0f, 53.0f, + }); + + return SimplePermuteTestImpl(workloadFactory, descriptor, inputTensorInfo, + outputTensorInfo, input, outputExpected); +} diff --git a/src/armnn/backends/test/Pooling2dTestImpl.hpp b/src/armnn/backends/test/Pooling2dTestImpl.hpp index fc84ddb..ab9fd6d 100644 --- a/src/armnn/backends/test/Pooling2dTestImpl.hpp +++ b/src/armnn/backends/test/Pooling2dTestImpl.hpp @@ -720,6 +720,83 @@ LayerTestResult SimpleMaxPooling2dSize2x2Stride2x2TestCommon(armnn::IWorkl return SimplePooling2dTestImpl(workloadFactory, descriptor, qScale, qOffset, input, outputExpected); } +// +// Tests average pooling with the following parameters: +// +// Pooling size: 3x2 +// Stride: (2,2) +// input size: 3x2 +// channels: 1 +// batch size: 1 +// +template +LayerTestResult IgnorePaddingAveragePooling2dSize3x2Stride2x2TestCommon( + armnn::IWorkloadFactory& workloadFactory, + bool forceNoPadding, + float qScale = 1.0f, + int32_t qOffset = 0) +{ + armnn::Pooling2dDescriptor descriptor; + descriptor.m_PoolType = armnn::PoolingAlgorithm::Average; + descriptor.m_PoolWidth = 3; + descriptor.m_PoolHeight = 2; + descriptor.m_StrideX = 2; + descriptor.m_StrideY = 2; + descriptor.m_PadLeft = (forceNoPadding) ?
0 : 1; + descriptor.m_PadRight = descriptor.m_PadLeft; + descriptor.m_PadTop = 0; + descriptor.m_PadBottom = 0; + descriptor.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor; + descriptor.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue; + + unsigned int inputWidth = 3; + unsigned int inputHeight = 2; + unsigned int outputWidth = + (inputWidth + descriptor.m_PadLeft + descriptor.m_PadRight + descriptor.m_StrideX - descriptor.m_PoolWidth) / + descriptor.m_StrideX; + unsigned int outputHeight = + (inputHeight + descriptor.m_PadTop + descriptor.m_PadBottom + descriptor.m_StrideY - descriptor.m_PoolHeight) / + descriptor.m_StrideY; + unsigned int channels = 1; + unsigned int batchSize = 1; + + std::vector inputData = { + 3.0f, 6.0f, 9.0f, + 12.0f, 15.0f, 18.0f, + }; + + std::vector expectedOutputDataWithPadding = { + 6.0f, 8.0f, + }; + + std::vector expectedOutputDataNoPadding = { + 10.5f, + }; + + armnn::TensorInfo inputTensorInfo({ batchSize, channels, inputHeight, inputWidth }, armnn::GetDataType()); + + // Scale and offset should match input - we're just calculating average values. + armnn::TensorInfo outputTensorInfo({ batchSize, channels, outputHeight, outputWidth }, armnn::GetDataType()); + + // Set quantization parameters if the requested type is a quantized type. + if(armnn::IsQuantizedType()) + { + inputTensorInfo.SetQuantizationScale(qScale); + inputTensorInfo.SetQuantizationOffset(qOffset); + outputTensorInfo.SetQuantizationScale(qScale); + outputTensorInfo.SetQuantizationOffset(qOffset); + } + + auto input = MakeTensor(inputTensorInfo, QuantizedVector(qScale, qOffset, inputData)); + + auto outputExpected = MakeTensor(outputTensorInfo, + forceNoPadding ? 
QuantizedVector(qScale, qOffset, expectedOutputDataNoPadding) : + QuantizedVector(qScale, qOffset, expectedOutputDataWithPadding)); + + return SimplePooling2dTestImpl(workloadFactory, descriptor, qScale, qOffset, input, outputExpected); +} + + template LayerTestResult IgnorePaddingSimpleMaxPooling2dTestCommon(armnn::IWorkloadFactory& workloadFactory, float qScale = 1.0f, diff --git a/src/armnn/backends/test/Reference.cpp b/src/armnn/backends/test/Reference.cpp index 87d82f1..89e5db8 100644 --- a/src/armnn/backends/test/Reference.cpp +++ b/src/armnn/backends/test/Reference.cpp @@ -76,6 +76,10 @@ ARMNN_AUTO_TEST_CASE(IgnorePaddingL2Pooling2dSize3Uint8, IgnorePaddingL2Pooling2 ARMNN_AUTO_TEST_CASE(SimpleAveragePooling2d, SimpleAveragePooling2dTest) ARMNN_AUTO_TEST_CASE(SimpleAveragePooling2dUint8, SimpleAveragePooling2dUint8Test) +ARMNN_AUTO_TEST_CASE(IgnorePaddingAveragePooling2dSize3x2Stride2x2, + IgnorePaddingAveragePooling2dSize3x2Stride2x2Test, false) +ARMNN_AUTO_TEST_CASE(IgnorePaddingAveragePooling2dSize3x2Stride2x2NoPadding, + IgnorePaddingAveragePooling2dSize3x2Stride2x2Test, true) ARMNN_AUTO_TEST_CASE(LargeTensorsAveragePooling2d, LargeTensorsAveragePooling2dTest) ARMNN_AUTO_TEST_CASE(LargeTensorsAveragePooling2dUint8, LargeTensorsAveragePooling2dUint8Test) @@ -158,7 +162,11 @@ ARMNN_AUTO_TEST_CASE(AddBroadcast1ElementUint8, AdditionBroadcast1ElementUint8Te // Mul ARMNN_AUTO_TEST_CASE(SimpleMultiplication, MultiplicationTest) +ARMNN_AUTO_TEST_CASE(MultiplicationBroadcast1Element, MultiplicationBroadcast1ElementTest) +ARMNN_AUTO_TEST_CASE(MultiplicationBroadcast1DVector, MultiplicationBroadcast1DVectorTest) ARMNN_AUTO_TEST_CASE(MultiplicationUint8, MultiplicationUint8Test) +ARMNN_AUTO_TEST_CASE(MultiplicationBroadcast1ElementUint8, MultiplicationBroadcast1ElementUint8Test) +ARMNN_AUTO_TEST_CASE(MultiplicationBroadcast1DVectorUint8, MultiplicationBroadcast1DVectorUint8Test) // Batch Norm ARMNN_AUTO_TEST_CASE(BatchNorm, BatchNormTest) @@ -227,5 +235,8 @@ 
ARMNN_AUTO_TEST_CASE(SimpleReshapeUint8, SimpleReshapeUint8Test) // Permute ARMNN_AUTO_TEST_CASE(SimplePermuteFloat32, SimplePermuteFloat32Test) ARMNN_AUTO_TEST_CASE(SimplePermuteUint8, SimplePermuteUint8Test) +ARMNN_AUTO_TEST_CASE(PermuteFloat32ValueSet1, PermuteFloat32ValueSet1Test) +ARMNN_AUTO_TEST_CASE(PermuteFloat32ValueSet2, PermuteFloat32ValueSet2Test) +ARMNN_AUTO_TEST_CASE(PermuteFloat32ValueSet3, PermuteFloat32ValueSet3Test) BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnn/optimizations/Optimization.hpp b/src/armnn/optimizations/Optimization.hpp index 89e03ff..f810718 100644 --- a/src/armnn/optimizations/Optimization.hpp +++ b/src/armnn/optimizations/Optimization.hpp @@ -13,7 +13,7 @@ namespace armnn class Optimization { public: - virtual void Run(Graph& graph, Graph::Iterator& pos) const = 0; + virtual void Run(Graph& graph, Layer& base) const = 0; protected: ~Optimization() = default; }; @@ -23,22 +23,20 @@ protected: // (curiously recurring template pattern). // For details, see https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern -/// Wrapper Optimization base class that calls Wrapped::Run for every layer of type BaseType. -/// - Wrapped class mustn't remove the base layer. -/// - Base layer is removed if left unconnected after applying the wrapped optimization. +/// Wrapper Optimization base class that calls Wrapped::Run() for every layer of type BaseType. +/// - Wrapped class mustn't remove the base layer. The optimizer will remove it if left unconnected +/// after applying each optimization. 
template class OptimizeForTypeImpl : public armnn::Optimization, public Wrapped { public: using Wrapped::Wrapped; - void Run(Graph& graph, Graph::Iterator& pos) const override + void Run(Graph& graph, Layer& base) const override { - Layer* const base = *pos; - - if (base->GetType() == LayerEnumOf()) + if (base.GetType() == LayerEnumOf()) { - Wrapped::Run(graph, *boost::polymorphic_downcast(base)); + Wrapped::Run(graph, *boost::polymorphic_downcast(&base)); } } @@ -46,16 +44,16 @@ protected: ~OptimizeForTypeImpl() = default; }; -/// Specialization that calls Wrapped::Run for any layer type +/// Specialization that calls Wrapped::Run() for any layer type template class OptimizeForTypeImpl : public armnn::Optimization, public Wrapped { public: using Wrapped::Wrapped; - void Run(Graph& graph, Graph::Iterator& pos) const override + void Run(Graph& graph, Layer& base) const override { - Wrapped::Run(graph, **pos); + Wrapped::Run(graph, base); } protected: @@ -70,9 +68,10 @@ public: }; /// Wrapper Optimization class that calls Wrapped::Run for every connection BaseType -> ChildType. -/// - Wrapped class mustn't remove the base layer. +/// - Wrapped class mustn't remove the base layer. The optimizer will remove it if left unconnected +/// after applying each optimization. /// - Wrapped class mustn't affect existing connections in the same output. It might add new ones. -/// - Base and children layers are removed if left unconnected after applying the wrapped optimization. +/// - Children layers are removed if left unconnected after applying the wrapped optimization. 
template class OptimizeForConnectionImpl : public Wrapped { diff --git a/src/armnn/optimizations/OptimizeConsecutiveReshapes.hpp b/src/armnn/optimizations/OptimizeConsecutiveReshapes.hpp index deb49c6..9a926a5 100644 --- a/src/armnn/optimizations/OptimizeConsecutiveReshapes.hpp +++ b/src/armnn/optimizations/OptimizeConsecutiveReshapes.hpp @@ -18,8 +18,8 @@ public: /// Inserts an equivalent ReshapeLayer that bypasses both for that connection. void Run(Graph& graph, InputSlot& connection) const { - auto& base = connection.GetConnectedOutputSlot()->GetOwningLayer(); - auto& child = connection.GetOwningLayer(); + Layer& base = connection.GetConnectedOutputSlot()->GetOwningLayer(); + Layer& child = connection.GetOwningLayer(); BOOST_ASSERT(base.GetType() == LayerType::Reshape); BOOST_ASSERT(child.GetType() == LayerType::Reshape); diff --git a/src/armnn/optimizations/SquashEqualSiblings.hpp b/src/armnn/optimizations/SquashEqualSiblings.hpp index 2dfe91f..c5ce28e 100644 --- a/src/armnn/optimizations/SquashEqualSiblings.hpp +++ b/src/armnn/optimizations/SquashEqualSiblings.hpp @@ -26,19 +26,29 @@ public: if (!child.IsOutputUnconnected()) { OutputSlot& baseOutput = *connection.GetConnectedOutputSlot(); - auto& comparableChild = *boost::polymorphic_downcast(&child); - for (auto&& it : baseOutput.GetConnections()) + if (baseOutput.GetNumConnections() > 1) { - Layer& sibling = it->GetOwningLayer(); - if ((&sibling != &child) && comparableChild.IsEqual(sibling)) + auto& comparableChild = *boost::polymorphic_downcast(&child); + + Layer* lowestPriorityChild = &child; + for (auto&& it : baseOutput.GetConnections()) { - // Bypass sibling. It will be removed as it's left unconnected. 
- auto siblingOut = sibling.BeginOutputSlots(); - for (auto childOut = child.BeginOutputSlots(); childOut != child.EndOutputSlots(); ++childOut) + Layer* sibling = &it->GetOwningLayer(); + if ((sibling != lowestPriorityChild) && comparableChild.IsEqual(*sibling)) { - siblingOut->MoveAllConnections(*childOut); - ++siblingOut; + if (sibling->GetPriority() < lowestPriorityChild->GetPriority()) + { + std::swap(sibling, lowestPriorityChild); + } + // Bypass sibling. It will be removed as it's left unconnected. + auto siblingOut = sibling->BeginOutputSlots(); + for (auto lowestPriorityChildOut = lowestPriorityChild->BeginOutputSlots(); + lowestPriorityChildOut != lowestPriorityChild->EndOutputSlots(); ++lowestPriorityChildOut) + { + siblingOut->MoveAllConnections(*lowestPriorityChildOut); + ++siblingOut; + } } } } diff --git a/src/armnn/test/Network_test.cpp b/src/armnn/test/Network_test.cpp index 523d47b..057caa0 100644 --- a/src/armnn/test/Network_test.cpp +++ b/src/armnn/test/Network_test.cpp @@ -29,6 +29,64 @@ bool AreAllLayerInputSlotsConnected(const armnn::IConnectableLayer& layer) BOOST_AUTO_TEST_SUITE(Network) +BOOST_AUTO_TEST_CASE(LayerGuids) +{ + armnn::Network net; + armnn::LayerGuid inputId = net.AddInputLayer(0)->GetGuid(); + armnn::LayerGuid addId = net.AddAdditionLayer()->GetGuid(); + armnn::LayerGuid outputId = net.AddOutputLayer(0)->GetGuid(); + + BOOST_TEST(inputId != addId); + BOOST_TEST(addId != outputId); + BOOST_TEST(inputId != outputId); +} + +BOOST_AUTO_TEST_CASE(SerializeToDot) +{ + armnn::Network net; + + //define layers + auto input = net.AddInputLayer(0); + auto add = net.AddAdditionLayer(); + auto output = net.AddOutputLayer(0); + + // connect layers + input->GetOutputSlot(0).Connect(add->GetInputSlot(0)); + input->GetOutputSlot(0).Connect(add->GetInputSlot(1)); + add->GetOutputSlot(0).Connect(output->GetInputSlot(0)); + + armnn::TensorShape shape({4}); + armnn::TensorInfo info(shape, armnn::DataType::Float32); + 
input->GetOutputSlot(0).SetTensorInfo(info); + add->GetOutputSlot(0).SetTensorInfo(info); + + armnn::DeviceSpec spec; + spec.DefaultComputeDevice = armnn::Compute::CpuAcc; + armnn::IOptimizedNetworkPtr optimizedNet = armnn::Optimize(net, spec); + + std::ostringstream ss; + optimizedNet->SerializeToDot(ss); + + auto inputId = input->GetGuid(); + auto addId = add->GetGuid(); + auto outputId = output->GetGuid(); + + std::stringstream expected; + expected << + "digraph Optimized {\n" + " node [shape=\"record\"];\n" + " edge [fontsize=8 fontcolor=\"blue\" fontname=\"arial-bold\"];\n" + " " << inputId << " [label=\"{Input}\"];\n" + " " << addId << " [label=\"{Addition}\"];\n" + " " << outputId << " [label=\"{Output}\"];\n" + " " << inputId << " -> " << addId << " [label=< [4] >];\n" + " " << inputId << " -> " << addId << " [label=< [4] >];\n" + " " << addId << " -> " << outputId << " [label=< [4] >];\n" + "}\n"; + + BOOST_TEST(ss.str() == expected.str()); +} + BOOST_AUTO_TEST_CASE(NetworkBasic) { armnn::Network net; diff --git a/src/armnn/test/OptimizerTests.cpp b/src/armnn/test/OptimizerTests.cpp new file mode 100644 index 0000000..da26fba --- /dev/null +++ b/src/armnn/test/OptimizerTests.cpp @@ -0,0 +1,334 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// +#include + +#include "armnn/ArmNN.hpp" +#include "Graph.hpp" +#include "Optimizer.hpp" + +namespace +{ +template +bool IsLayerOfType(const armnn::Layer* const layer) +{ + return (layer->GetType() == armnn::LayerEnumOf()); +} + +bool CheckSequence(const armnn::Graph::ConstIterator first, const armnn::Graph::ConstIterator last) +{ + return (first == last); +} + +/// Check each unary function in Us evaluates true for each correspondent layer in the sequence [first, last) +template +bool CheckSequence(const armnn::Graph::ConstIterator first, + const armnn::Graph::ConstIterator last, + U&& u, + Us&&... 
us) +{ + return u(*first) && CheckSequence(std::next(first), last, us...); +} +} + +BOOST_AUTO_TEST_SUITE(Optimizer) + +BOOST_AUTO_TEST_CASE(OptimizeInversePermutes) +{ + armnn::Graph graph; + + auto output = graph.AddLayer(0, "output"); + + graph.InsertNewLayer(output->GetInputSlot(0), 0, "input"); + + // Insert two permutes, one the inverse of the other + graph.InsertNewLayer(output->GetInputSlot(0), + armnn::PermuteDescriptor({0, 2, 3, 1}), + "perm0231"); + graph.InsertNewLayer(output->GetInputSlot(0), + armnn::PermuteDescriptor({0, 3, 1, 2}), + "perm0312"); + + BOOST_TEST(CheckSequence(graph.cbegin(), + graph.cend(), + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType)); + + armnn::Optimizer::Optimize(graph); + + // The permutes are removed + BOOST_TEST(CheckSequence(graph.cbegin(), + graph.cend(), + &IsLayerOfType, + &IsLayerOfType)); +} + +BOOST_AUTO_TEST_CASE(MovePermuteUp) +{ + const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32); + const armnn::TensorInfo permuted({ 1, 3, 5, 2 }, armnn::DataType::Float32); + + armnn::Graph graph; + + armnn::LayerBindingId inputId = 0; + + armnn::Layer* head = graph.AddLayer(0, "output"); + + // Insert permute + head = graph.InsertNewLayer(head->GetInputSlot(0), + armnn::PermuteDescriptor({ 0, 2, 3, 1 }), ""); + head->GetOutputHandler().SetTensorInfo(permuted); + + // Insert layers that don't care about data format + head = graph.InsertNewLayer(head->GetInputSlot(0), + armnn::ActivationDescriptor{}, ""); + head->GetOutputHandler().SetTensorInfo(info); + + head = graph.InsertNewLayer(head->GetInputSlot(0), ""); + head->GetOutputHandler().SetTensorInfo(info); + + // Insert input for 2nd input of Addition + graph.InsertNewLayer(head->GetInputSlot(1), inputId++, "") + ->GetOutputHandler().SetTensorInfo(info); + + head = graph.InsertNewLayer(head->GetInputSlot(0), + armnn::FakeQuantizationDescriptor{}, ""); + head->GetOutputHandler().SetTensorInfo(info); + + head = 
graph.InsertNewLayer(head->GetInputSlot(0), ""); + head->GetOutputHandler().SetTensorInfo(info); + + head = graph.InsertNewLayer(head->GetInputSlot(0), ""); + head->GetOutputHandler().SetTensorInfo(info); + + head = graph.InsertNewLayer(head->GetInputSlot(0), ""); + head->GetOutputHandler().SetTensorInfo(info); + + // Insert input for 2nd input of Multiplication + graph.InsertNewLayer(head->GetInputSlot(1), inputId++, "") + ->GetOutputHandler().SetTensorInfo(info); + + // Insert input + graph.InsertNewLayer(head->GetInputSlot(0), inputId++, "") + ->GetOutputHandler().SetTensorInfo(info); + + BOOST_TEST(CheckSequence(graph.cbegin(), + graph.cend(), + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType)); + + armnn::Optimizer::Optimize(graph); + + // The permute is moved to the top. New permutes for layers with multiple inputs + BOOST_TEST(CheckSequence(graph.cbegin(), + graph.cend(), + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType)); +} + +BOOST_AUTO_TEST_CASE(PermuteAsReshape) +{ + armnn::Graph graph; + + const armnn::TensorInfo infoIn({ 1, 2, 3, 1 }, armnn::DataType::Float32); + const armnn::TensorInfo infoOut({ 1, 1, 2, 3 }, armnn::DataType::Float32); + + auto output = graph.AddLayer(0, "output"); + + graph.InsertNewLayer(output->GetInputSlot(0), 0, "input") + ->GetOutputHandler().SetTensorInfo(infoIn); + + // Insert permute + graph.InsertNewLayer(output->GetInputSlot(0), + armnn::PermuteDescriptor({ 0, 2, 3, 1 }), "") + ->GetOutputHandler().SetTensorInfo(infoOut); + + BOOST_TEST(CheckSequence(graph.cbegin(), + graph.cend(), + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType)); + + armnn::Optimizer::Optimize(graph); + + // The permute is 
replaced by an equivalent reshape. + + auto checkReshape = [&infoOut](const armnn::Layer* const layer) -> bool + { + const auto reshapeLayer = static_cast(layer); + return IsLayerOfType(layer) && + (reshapeLayer->GetParameters().m_TargetShape == infoOut.GetShape()) && + (reshapeLayer->GetOutputHandler().GetTensorInfo().GetShape() == infoOut.GetShape()); + }; + + BOOST_TEST(CheckSequence(graph.cbegin(), + graph.cend(), + &IsLayerOfType, + checkReshape, + &IsLayerOfType)); +} + +BOOST_AUTO_TEST_CASE(OptimizeConsecutiveReshapes) +{ + armnn::Graph graph; + + const armnn::TensorInfo info0({ 1, 2, 3, 5 }, armnn::DataType::Float32); + + auto output = graph.AddLayer(0, "output"); + auto input = graph.InsertNewLayer(output->GetInputSlot(0), 0, "input"); + + input->GetOutputHandler().SetTensorInfo(info0); + + { + // Insert two reshapes + const armnn::TensorInfo info1({1, 30, 1, 1}, armnn::DataType::Float32); + const armnn::TensorInfo info2({1, 2, 1, 15}, armnn::DataType::Float32); + + auto reshape1 = graph.InsertNewLayer(output->GetInputSlot(0), + armnn::ReshapeDescriptor{ info1.GetShape() }, + "reshape1"); + auto reshape2 = graph.InsertNewLayer(output->GetInputSlot(0), + armnn::ReshapeDescriptor{ info2.GetShape() }, + "reshape2"); + + reshape1->GetOutputHandler().SetTensorInfo(info1); + reshape2->GetOutputHandler().SetTensorInfo(info2); + + BOOST_TEST(CheckSequence(graph.cbegin(), + graph.cend(), + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType)); + + armnn::Optimizer::Optimize(graph); + + auto checkReshape = [&info2](const armnn::Layer* const layer) -> bool + { + const auto reshapeLayer = static_cast(layer); + return IsLayerOfType(layer) && + (reshapeLayer->GetParameters().m_TargetShape == info2.GetShape()) && + (reshapeLayer->GetOutputHandler().GetTensorInfo().GetShape() == info2.GetShape()); + }; + + // The two reshapes are replaced by a single equivalent reshape + BOOST_TEST(CheckSequence(graph.cbegin(), + graph.cend(), + &IsLayerOfType, + 
checkReshape, + &IsLayerOfType)); + } + + { + // Insert a reshape to the input shape + auto reshapeToIn = graph.InsertNewLayer(output->GetInputSlot(0), + armnn::ReshapeDescriptor{ info0.GetShape() }, + "reshapeToIn"); + + reshapeToIn->GetOutputHandler().SetTensorInfo(info0); + + armnn::Optimizer::Optimize(graph); + + // The two reshapes are removed + BOOST_TEST(CheckSequence(graph.cbegin(), + graph.cend(), + &IsLayerOfType, + &IsLayerOfType)); + } +} + +BOOST_AUTO_TEST_CASE(SquashEqualSiblings) +{ + armnn::Graph graph; + + armnn::LayerBindingId outputId = 0; + + const armnn::TensorInfo info({ 1, 2, 3, 5 }, armnn::DataType::Float32); + const armnn::TensorInfo permuted({ 1, 5, 2, 3 }, armnn::DataType::Float32); + + auto input = graph.AddLayer(0, "input"); + input->GetOutputSlot().SetTensorInfo(info); + + // Insert equal permutes, equal reshapes and something else + const armnn::PermuteDescriptor permDesc({ 0, 2, 3, 1 }); + const armnn::ReshapeDescriptor reshapeDesc{ { 1, 3, 1, 5 } }; + + armnn::Layer* layer; + + layer = graph.AddLayer(permDesc, ""); + layer->GetOutputSlot().SetTensorInfo(permuted); + layer->GetOutputSlot().Connect(graph.AddLayer(outputId++, "")->GetInputSlot(0)); + input->GetOutputSlot().Connect(layer->GetInputSlot(0)); + + layer = graph.AddLayer(reshapeDesc, ""); + layer->GetOutputSlot().Connect(graph.AddLayer(outputId++, "")->GetInputSlot(0)); + input->GetOutputSlot().Connect(layer->GetInputSlot(0)); + + layer = graph.AddLayer(""); + layer->GetOutputSlot().Connect(graph.AddLayer(outputId++, "")->GetInputSlot(0)); + input->GetOutputSlot().Connect(layer->GetInputSlot(0)); + + layer = graph.AddLayer(reshapeDesc, ""); + layer->GetOutputSlot().Connect(graph.AddLayer(outputId++, "")->GetInputSlot(0)); + input->GetOutputSlot().Connect(layer->GetInputSlot(0)); + + layer = graph.AddLayer(permDesc, ""); + layer->GetOutputSlot().SetTensorInfo(permuted); + layer->GetOutputSlot().Connect(graph.AddLayer(outputId++, "")->GetInputSlot(0)); + 
input->GetOutputSlot().Connect(layer->GetInputSlot(0)); + + BOOST_TEST(CheckSequence(graph.cbegin(), + graph.cend(), + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType)); + + armnn::Optimizer::Optimize(graph); + + // The permutes and reshapes are squashed. + + BOOST_TEST(CheckSequence(graph.cbegin(), + graph.cend(), + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType, + &IsLayerOfType)); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnn/test/RuntimeTests.cpp b/src/armnn/test/RuntimeTests.cpp index 117df5e..e42d71c 100644 --- a/src/armnn/test/RuntimeTests.cpp +++ b/src/armnn/test/RuntimeTests.cpp @@ -115,7 +115,7 @@ BOOST_AUTO_TEST_CASE(RuntimeMemoryUsage) BOOST_TEST(leakedBefore == leakedAfter); // Add resonable threshold after and before running valgrind with the ACL clear cache function. - BOOST_TEST(reachableAfter - reachableBefore < 30000); + BOOST_TEST(static_cast(reachableAfter) - static_cast(reachableBefore) < 1024); // these are needed because VALGRIND_COUNT_LEAKS is a macro that assigns to the parameters // so they are assigned to, but still considered unused, causing a warning @@ -178,7 +178,18 @@ BOOST_AUTO_TEST_CASE(RuntimeMemoryLeak) // if we're not running under Valgrind, these vars will have been initialised to 0, so this will always pass BOOST_TEST(leakedBefore == leakedAfter); - BOOST_TEST(reachableBefore == reachableAfter); + + #if defined(ARMCOMPUTECL_ENABLED) + // reachableBefore == reachableAfter should hold, but on OpenCL with Android we are still + // not entirely able to control the memory in the OpenCL driver. Testing is showing that + // after this test (which clears all OpenCL memory) we are clearing a little bit more than + // we expect, probably depending on the order in which other tests are run. 
+ BOOST_TEST(reachableBefore - reachableAfter <= 24); + #else + BOOST_TEST(reachableBefore == reachableAfter); + #endif + + BOOST_TEST(reachableBefore >= reachableAfter); // these are needed because VALGRIND_COUNT_LEAKS is a macro that assigns to the parameters // so they are assigned to, but still considered unused, causing a warning diff --git a/src/armnnCaffeParser/CaffeSupport.md b/src/armnnCaffeParser/CaffeSupport.md new file mode 100644 index 0000000..e772480 --- /dev/null +++ b/src/armnnCaffeParser/CaffeSupport.md @@ -0,0 +1,31 @@ +# Caffe layers supported by the Arm NN SDK +This reference guide provides a list of Caffe layers the Arm NN SDK currently supports. + +Although some other neural networks might work, Arm tests the Arm NN SDK with Caffe implementations of the following neural networks: + +- AlexNet. +- Cifar10. +- Inception-BN. +- Resnet_50, Resnet_101 and Resnet_152. +- VGG_CNN_S, VGG_16 and VGG_19. +- Yolov1_tiny. +- Lenet. +- MobileNetv1. + +The Arm NN SDK supports the following machine learning layers for Caffe networks: + + +- BatchNorm, in inference mode. +- Convolution, excluding the Dilation Size, Weight Filler, Bias Filler, Engine, Force nd_im2col, and Axis parameters. +- Eltwise, excluding the coeff parameter. +- Inner Product, excluding the Weight Filler, Bias Filler, Engine, and Axis parameters. +- Input. +- LRN, excluding the Engine parameter. +- Pooling, excluding the Stochastic Pooling and Engine parameters. +- ReLU. +- Scale. +- Softmax, excluding the Axis and Engine parameters. +- Split. +- Dropout, in inference mode. + +More machine learning layers will be supported in future releases.
\ No newline at end of file diff --git a/src/armnnCaffeParser/README.md b/src/armnnCaffeParser/README.md new file mode 100644 index 0000000..92d7d0a --- /dev/null +++ b/src/armnnCaffeParser/README.md @@ -0,0 +1,5 @@ +# Arm NN Caffe parser + +`armnnCaffeParser` is a library for loading neural networks defined in Caffe protobuf files into the Arm NN runtime. + +For more information about the Caffe layers that are supported, and the networks that have been tested, see [CaffeSupport.md](./CaffeSupport.md). \ No newline at end of file diff --git a/src/armnnTfParser/README.md b/src/armnnTfParser/README.md new file mode 100644 index 0000000..fe3f2b8 --- /dev/null +++ b/src/armnnTfParser/README.md @@ -0,0 +1,5 @@ +# The Arm NN TensorFlow parser + +`armnnTfParser` is a library for loading neural networks defined by TensorFlow protobuf files into the Arm NN runtime. + +For more information about the TensorFlow operators that are supported, and the networks that have been tested, see [TensorFlowSupport.md](./TensorFlowSupport.md). \ No newline at end of file diff --git a/src/armnnTfParser/TensorFlowSupport.md b/src/armnnTfParser/TensorFlowSupport.md new file mode 100644 index 0000000..d052a70 --- /dev/null +++ b/src/armnnTfParser/TensorFlowSupport.md @@ -0,0 +1,111 @@ +# TensorFlow operators that the Arm NN SDK supports + +This reference guide provides a list of TensorFlow operators the Arm NN SDK currently supports. + +The Arm NN SDK TensorFlow parser currently only supports fp32 operators. + +These are the TensorFlow operators that the Arm NN SDK currently supports: + +**avg_pool** + +See the TensorFlow [avg_pool documentation](https://www.tensorflow.org/api_docs/python/tf/nn/avg_pool) for more information. + +**bias_add** + + See the TensorFlow [bias_add documentation](https://www.tensorflow.org/api_docs/python/tf/nn/bias_add) for more information.
+ +**conv2d** + + See the TensorFlow [conv2d documentation](https://www.tensorflow.org/api_docs/python/tf/nn/conv2d) for more information. + +**identity** + +See the TensorFlow [identity documentation](https://www.tensorflow.org/api_docs/python/tf/identity) for more information. + +**local_response_normalization** + +See the TensorFlow [local_response_normalization documentation](https://www.tensorflow.org/api_docs/python/tf/nn/local_response_normalization) for more information. + +**max_pool** + +See the TensorFlow [max_pool documentation](https://www.tensorflow.org/api_docs/python/tf/nn/max_pool) for more information. + +**relu** + + See the TensorFlow [relu documentation](https://www.tensorflow.org/api_docs/python/tf/nn/relu) for more information. + +**relu6** + + See the TensorFlow [relu6 documentation](https://www.tensorflow.org/api_docs/python/tf/nn/relu6) for more information. + +**shape** + + See the TensorFlow [shape documentation](https://www.tensorflow.org/api_docs/python/tf/shape) for more information. + +**sigmoid** + + See the TensorFlow [sigmoid documentation](https://www.tensorflow.org/api_docs/python/tf/sigmoid) for more information. + +**softplus** + +See the TensorFlow [softplus documentation](https://www.tensorflow.org/api_docs/python/tf/nn/softplus) for more information. + +**squeeze** + +See the TensorFlow [squeeze documentation](https://www.tensorflow.org/api_docs/python/tf/squeeze) for more information. + +**tanh** + +See the TensorFlow [tanh documentation](https://www.tensorflow.org/api_docs/python/tf/tanh) for more information. + +The Arm NN SDK TensorFlow parser currently partially supports: + +**add** + +The parser does not support all forms of [broadcast composition](https://www.tensorflow.org/performance/xla/broadcasting), only broadcasting of scalars and 1D tensors. See the TensorFlow [add operator documentation](https://www.tensorflow.org/api_docs/python/tf/add) for more information. 
+ +**depthwise_conv2D_native** + +The parser only supports a dilation rate of (1,1,1,1). See the TensorFlow [depthwise_conv2d_native documentation](https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d_native) for more information. + +**fused_batch_norm** + +The parser does not support training outputs. See the TensorFlow [fused_batch_norm documentation](https://www.tensorflow.org/api_docs/python/tf/nn/fused_batch_norm) for more information. + +**matmul** + +The parser only supports constant weights in a fully connected layer. See the TensorFlow [matmul documentation](https://www.tensorflow.org/api_docs/python/tf/matmul) for more information. + +**multiply** + +The parser does not support all forms of [broadcast composition](https://www.tensorflow.org/performance/xla/broadcasting), only broadcasting of scalars and 1D tensors. See the TensorFlow [multiply documentation](https://www.tensorflow.org/api_docs/python/tf/multiply) for more information. No broadcasting supported on the NEON backend. + +**placeholder** + + The parser only supports the NHWC data format in the input layer. See the TensorFlow [placeholder documentation](https://www.tensorflow.org/api_docs/python/tf/placeholder) for more information. + +**reshape** + +The parser does not support reshaping to or from 4D. See the TensorFlow [reshape documentation](https://www.tensorflow.org/api_docs/python/tf/reshape) for more information. + +**resize_images** + +The parser only supports `ResizeMethod.BILINEAR`. See the TensorFlow [resize_images documentation](https://www.tensorflow.org/api_docs/python/tf/image/resize_images) for more information. + +**softmax** + +The parser only supports 2D inputs and does not support selecting the `softmax` dimension. See the TensorFlow [softmax documentation](https://www.tensorflow.org/api_docs/python/tf/nn/softmax) for more information. + + + +Arm tests these operators with the following TensorFlow fp32 neural networks: + +* Cifar10. + +* Lenet. 
+ +* mobilenet_v1_1.0_224. The Arm NN SDK only supports the non-quantized (non `_quant`) version of the network. See the [MobileNet_v1 documentation](https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet_v1.md) for more information on _quant networks. + +* inception_v3. The Arm NN SDK only supports the official inception_v3 transformed model, and only with GPU acceleration; NEON acceleration is not supported at the moment. See the TensorFlow documentation on [preparing models for mobile deployment](https://www.tensorflow.org/mobile/prepare_models) for more information on how to transform the inception_v3 network. + +More machine learning operators will be supported in future releases. diff --git a/src/armnnTfParser/TfParser.cpp b/src/armnnTfParser/TfParser.cpp new file mode 100644 index 0000000..7c8e01b --- /dev/null +++ b/src/armnnTfParser/TfParser.cpp @@ -0,0 +1,2200 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information.
+// +#include "TfParser.hpp" + +#include +#include +#include +#include +#include + +#include +#include + +#include +#include + +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/framework/node_def.pb.h" +#include "tensorflow/core/framework/types.pb.h" +#include "tensorflow/core/framework/tensor.pb.h" +#include "tensorflow/core/framework/tensor_shape.pb.h" + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +using namespace armnn; + +namespace armnnTfParser +{ +namespace +{ + +const PermutationVector NHWCToArmNN = { 0, 2, 3, 1 }; +const PermutationVector ArmNNToNHWC = { 0, 3, 1, 2 }; + +IConnectableLayer* AddSwizzleLayer(INetwork& network, IOutputSlot& input, const PermutationVector& mapping, + const std::string& name) +{ + // Add swizzle layer + IConnectableLayer* const layer = network.AddPermuteLayer(mapping, name.c_str()); + + // Connect intput to swizzle layer + input.Connect(layer->GetInputSlot(0)); + + // Setup swizzled output + const TensorInfo outInfo = armnnUtils::Permuted(input.GetTensorInfo(), mapping); + layer->GetOutputSlot(0).SetTensorInfo(outInfo); + + return layer; +} + +IConnectableLayer* SwizzleInDeswizzleOut(INetwork& network, IOutputSlot& input, IConnectableLayer& layer, + const std::string& name) +{ + // Add swizzle layer + IConnectableLayer* const swizzleLayer = AddSwizzleLayer(network, input, NHWCToArmNN, "swizzle_for-" + name); + + // Connect swizzledInput to layer + swizzleLayer->GetOutputSlot(0).Connect(layer.GetInputSlot(0)); + + // Add deswizzle layer + IConnectableLayer* const deswizzleLayer = AddSwizzleLayer(network, layer.GetOutputSlot(0), ArmNNToNHWC, + "deswizzle_for-" + name); + + return deswizzleLayer; +} + +template +void ReadMandatoryNodeAttributeImpl(const tensorflow::NodeDef& nodeDef, + const std::string& attribName, + tensorflow::AttrValue::ValueCase expectedValueCase, + Callable callable) +{ + auto iter = nodeDef.attr().find(attribName); + if (iter 
!= nodeDef.attr().end()) + { + const auto& attrValue = iter->second; + if (attrValue.value_case() == expectedValueCase) + { + callable(attrValue); + } + else + { + throw ParseException(boost::str(boost::format( + "Attribute %1% of node %2% expected to have %3% as tensorflow::AttrValue::ValueCase, " + "but found %4% instead") + % attribName + % nodeDef.name() + % static_cast(expectedValueCase) + % static_cast(attrValue.value_case()))); + } + } + else + { + throw ParseException(boost::str(boost::format("Could not find required attribute %1% in node %2%") + % attribName % nodeDef.name())); + } +} + +template +void ReadOptionalNodeAttributeImpl(const tensorflow::NodeDef& nodeDef, + const std::string& attribName, + tensorflow::AttrValue::ValueCase expectedValueCase, + Callable callable) +{ + auto iter = nodeDef.attr().find(attribName); + if (iter != nodeDef.attr().end()) + { + const auto& attrValue = iter->second; + if (attrValue.value_case() == expectedValueCase) + { + callable(attrValue); + } + else + { + throw ParseException(boost::str(boost::format( + "Attribute %1% of node %2% expected to have %3% as tensorflow::AttrValue::ValueCase, " + "but found %4% instead") + % attribName + % nodeDef.name() + % static_cast(expectedValueCase) + % static_cast(attrValue.value_case()))); + } + } +} + +float ReadMandatoryNodeFloatAttribute(const tensorflow::NodeDef& nodeDef, const std::string& name) +{ + float attribValue = 0.0f; + ReadMandatoryNodeAttributeImpl(nodeDef, name, tensorflow::AttrValue::kF, + [&attribValue](const tensorflow::AttrValue& attrValue) + { + attribValue = attrValue.f(); + }); + return attribValue; +} + +uint32_t ReadMandatoryNodeUint32Attribute(const tensorflow::NodeDef& nodeDef, const std::string& name) +{ + uint32_t attribValue = 0u; + ReadMandatoryNodeAttributeImpl(nodeDef, name, tensorflow::AttrValue::kI, + [&attribValue](const tensorflow::AttrValue& attrValue) + { + attribValue = static_cast(attrValue.i()); + }); + return attribValue; +} + +std::string 
ReadMandatoryNodeStringAttribute(const tensorflow::NodeDef& nodeDef, const std::string& name) +{ + std::string attribValue = ""; + ReadMandatoryNodeAttributeImpl(nodeDef, name, tensorflow::AttrValue::kS, + [&attribValue](const tensorflow::AttrValue& attrValue) + { + attribValue = attrValue.s(); + }); + return attribValue; +} + +std::vector ReadMandatoryNodeUint32ListAttribute(const tensorflow::NodeDef& nodeDef, + const std::string& name) +{ + std::vector attriList; + ReadMandatoryNodeAttributeImpl(nodeDef, name, tensorflow::AttrValue::kList, + [&attriList](const tensorflow::AttrValue& attrValue) + { + for (int attriNum = 0; attriNum < attrValue.list().i_size(); ++attriNum) + { + attriList.push_back(static_cast(attrValue.list().i(attriNum))); + } + }); + + return attriList; +} + +std::vector ReadOptionalNodeUint32ListAttribute(const tensorflow::NodeDef& nodeDef, + const std::string& name) +{ + std::vector attriList; + ReadOptionalNodeAttributeImpl(nodeDef, name, tensorflow::AttrValue::kList, + [&attriList](const tensorflow::AttrValue& attrValue) + { + for (int attriNum = 0; attriNum < attrValue.list().i_size(); ++attriNum) + { + attriList.push_back(static_cast(attrValue.list().i(attriNum))); + } + }); + + return attriList; +} + +bool ReadOptionalNodeBoolAttribute(const tensorflow::NodeDef& nodeDef, + const std::string& name, + bool defaultValue = false) +{ + bool attribValue = defaultValue; + ReadOptionalNodeAttributeImpl(nodeDef, name, tensorflow::AttrValue::kB, + [&attribValue](const tensorflow::AttrValue& attrValue) + { + attribValue = attrValue.b(); + }); + return attribValue; +} + +tensorflow::DataType ReadMandatoryNodeTypeAttribute(const tensorflow::NodeDef& nodeDef, const std::string& name) +{ + tensorflow::DataType attribValue = tensorflow::DT_INVALID; + ReadMandatoryNodeAttributeImpl(nodeDef, name, tensorflow::AttrValue::kType, + [&attribValue](const tensorflow::AttrValue& attrValue) + { + attribValue = attrValue.type(); + }); + return attribValue; +} + 
+TensorInfo PrepareReshape(const TensorInfo& input, const std::vector& targetDims) +{ + std::vector outDims(targetDims.begin(), targetDims.end()); + const auto stretchDim = std::find(targetDims.begin(), targetDims.end(), -1); + + if (stretchDim != targetDims.end()) + { + if (std::find(std::next(stretchDim), targetDims.end(), -1) != targetDims.end()) + { + throw ParseException("At most one component of shape can be -1"); + } + + auto targetNumElements = boost::numeric_cast(std::accumulate(targetDims.begin(), targetDims.end(), + -1, std::multiplies())); + auto stretchIndex = static_cast(std::distance(targetDims.begin(), stretchDim)); + outDims[stretchIndex] = input.GetNumElements() / targetNumElements; + } + + TensorInfo reshapeInfo = input; + reshapeInfo.SetShape(TensorShape{ static_cast(outDims.size()), outDims.data() }); + + return reshapeInfo; +} + +// We need the input0Slot to guide the reshape for input1Slot +IOutputSlot* BroadcastForAddandMul(IOutputSlot* input0Slot, IOutputSlot* input1Slot, bool isNHWC, INetwork& m_Network, + const tensorflow::NodeDef& nodeDef) +{ + const TensorInfo& input1Info = input1Slot->GetTensorInfo(); + const TensorInfo inputTensorInfo = input0Slot->GetTensorInfo(); + const unsigned int matchDim = inputTensorInfo.GetNumDimensions() - (isNHWC ? 
1 : 3); + std::array reshapedDimensions; + std::fill_n(reshapedDimensions.begin(), inputTensorInfo.GetNumDimensions(), 1); + reshapedDimensions[matchDim] = input1Info.GetShape()[0]; + + armnn::TensorInfo reshapedInfo = input1Info; + reshapedInfo.SetShape(TensorShape{ inputTensorInfo.GetNumDimensions(), reshapedDimensions.data() }); + + const std::string reshapeLayerName = "reshape_for-" + nodeDef.name(); + ReshapeDescriptor reshapeDesc; + reshapeDesc.m_TargetShape = reshapedInfo.GetShape(); + IConnectableLayer* const reshapeLayer = m_Network.AddReshapeLayer(reshapeDesc, reshapeLayerName.c_str()); + + input1Slot->Connect(reshapeLayer->GetInputSlot(0)); + reshapeLayer->GetOutputSlot(0).SetTensorInfo(reshapedInfo); + + input1Slot = &reshapeLayer->GetOutputSlot(0); + + return input1Slot; +} + +OutputId ParseOutputId(const std::string & name) +{ + unsigned int outputNum = 0; + size_t colonPos = name.find_last_of(":"); + if (colonPos != std::string::npos) + { + int n = std::stoi(name.substr(colonPos+1)); + if (n<0 || n>100) + { + throw ParseException("Output tensor id is out of range for "+name); + } + outputNum = static_cast(n); + } + return OutputId(name.substr(0,colonPos),outputNum); +} + +} // namespace + +const std::map TfParser::ms_OperationNameToParsingFunctions = { + { "Const", &TfParser::ParseConst }, + { "Add", &TfParser::ParseAdd }, + { "BiasAdd", &TfParser::ParseBiasAdd }, + { "Identity", &TfParser::ParseIdentity }, + { "Conv2D", &TfParser::ParseConv2D }, + { "DepthwiseConv2dNative", &TfParser::ParseDepthwiseConv2D }, + { "FusedBatchNorm", &TfParser::ParseFusedBatchNorm }, + { "ConcatV2", &TfParser::ParseConcat }, + { "LRN", &TfParser::ParseLrn }, + { "MatMul", &TfParser::ParseMatMul }, + { "Mul", &TfParser::ParseMul }, + { "Placeholder", &TfParser::ParsePlaceholder }, + { "Relu", &TfParser::ParseRelu }, + { "Relu6", &TfParser::ParseRelu6 }, + { "Reshape", &TfParser::ParseReshape }, + { "ResizeBilinear", &TfParser::ParseResizeBilinear }, + { "Shape", 
&TfParser::ParseShape }, + { "Squeeze", &TfParser::ParseSqueeze }, + { "Sigmoid", &TfParser::ParseSigmoid }, + { "Softmax", &TfParser::ParseSoftmax }, + { "Softplus", &TfParser::ParseSoftplus }, + { "Tanh", &TfParser::ParseTanh }, + { "MaxPool", &TfParser::ParseMaxPool }, + { "AvgPool", &TfParser::ParseAvgPool }, +}; + +ITfParser* ITfParser::CreateRaw() +{ + return new TfParser(); +} + +ITfParserPtr ITfParser::Create() +{ + return ITfParserPtr(CreateRaw(), &ITfParser::Destroy); +} + +void ITfParser::Destroy(ITfParser* parser) +{ + delete parser; +} + +inline void CalculateSamePadding(uint32_t inputSize, uint32_t stride, + uint32_t filterSize, bool samePadding, + uint32_t* paddingFront, uint32_t* paddingBack) { + *paddingFront = 0; + *paddingBack = 0; + + if (samePadding) { + uint32_t outputSize = (inputSize + stride - 1) / stride; + uint32_t temp = (outputSize - 1) * stride + filterSize; + if (temp > inputSize) { + *paddingFront = (temp - inputSize) / 2; + *paddingBack = (temp - inputSize) - *paddingFront; + } + } +} + +void CalcPadding(uint32_t input, uint32_t kernel, uint32_t stride, uint32_t& outPadHead, uint32_t& outPadTail, + bool samePadding) +{ + CalculateSamePadding(input, stride, kernel, samePadding, &outPadHead, &outPadTail); +} + +/// An Abstract base class which represents a single tensorflow operation (node) +/// that has been (potentially partially) converted to Armnn. +/// It may not yet have been fully converted into actual Armnn layers. +class ParsedTfOperation +{ +public: + ParsedTfOperation(TfParser* parser, const tensorflow::NodeDef& node) + : m_Parser(parser) + , m_Node(node) + { + } + + virtual ~ParsedTfOperation() {}; + + const tensorflow::NodeDef& GetNode() const { return m_Node; } + + /// Gets the ArmNN IOutputSlot corresponding to the given output index of the Tensorflow operation. + /// This may result in the creation of Armnn layers if this was deferred (e.g. see ParsedConstTfOperation). 
+ virtual IOutputSlot& ResolveArmnnOutputSlot(unsigned int tfOutputIndex) = 0; + + /// If this operation is an Identity then this will follow return the 'parent' operation (recursively). + virtual ParsedTfOperation* ResolveIdentityOperations() + { + return this; + } + +protected: + TfParser* m_Parser; + const tensorflow::NodeDef& m_Node; +}; + +/// An ParsedTfOperation where the Armnn equivalent is a single layer, +/// with output slots that correspond directly to the Tf node outputs. +class SingleLayerParsedTfOperation : public ParsedTfOperation +{ +public: + SingleLayerParsedTfOperation(TfParser* parser, const tensorflow::NodeDef& node, IConnectableLayer* layer) + : ParsedTfOperation(parser, node) + , m_Layer(layer) + { + } + + IOutputSlot& ResolveArmnnOutputSlot(unsigned int tfOutputIndex) override + { + BOOST_ASSERT(m_Layer); + // Assume one-to-one mapping between Tf and armnn output slots. + unsigned int armnnOutputSlotIdx = tfOutputIndex; + if (armnnOutputSlotIdx >= m_Layer->GetNumOutputSlots()) + { + throw ParseException( + boost::str(boost::format("The requested output slot #%1% " + "for %2% does not exist") % armnnOutputSlotIdx % m_Layer->GetName())); + } + return m_Layer->GetOutputSlot(armnnOutputSlotIdx); + } + +protected: + IConnectableLayer* m_Layer; +}; + +/// A SingleLayerParsedTfOperation for deferred layer creation +class DeferredSingleLayerParsedTfOperation : public SingleLayerParsedTfOperation +{ +public: + DeferredSingleLayerParsedTfOperation(TfParser* parser, const tensorflow::NodeDef& node) + : SingleLayerParsedTfOperation(parser, node, nullptr) + { + } + + IOutputSlot& ResolveArmnnOutputSlot(unsigned int tfOutputIndex) override + { + if (!m_Layer) + { + CreateLayerDeferred(); + } + return SingleLayerParsedTfOperation::ResolveArmnnOutputSlot(tfOutputIndex); + } + +private: + virtual void CreateLayerDeferred() = 0; +}; + + +TfParser::TfParser() + : m_Network(nullptr, nullptr) +{ +} + + +const tensorflow::NodeDef* 
TfParser::ResolveIdentityNode(const tensorflow::NodeDef* nodeDef) +{ + if (nodeDef->op() != "Identity") + { + return nodeDef; + } + + if (nodeDef->input_size() != 1) + { + throw ParseException("Identity node does not have correct amount of inputs!"); + } + + auto it = m_NodesByName.find(nodeDef->input(0)); + if (it != m_NodesByName.end()) + { + const tensorflow::NodeDef* inputNode = it->second; + return ResolveIdentityNode(inputNode); + } + else + { + throw ParseException("Cannot find what the Identity node is linked to!"); + } +} + +std::vector +TfParser::GetTfInputNodes(const tensorflow::NodeDef& nodeDef) const +{ + std::vector ret; + + ret.reserve(boost::numeric_cast(nodeDef.input_size())); + for (int j = 0; j < nodeDef.input_size(); ++j) + { + OutputId outputId = ParseOutputId(nodeDef.input(j)); + auto inputIt = m_NodesByName.find(outputId.m_IndexedValue); + if (inputIt == m_NodesByName.end()) + { + throw ParseException( + "Can't find node '" + nodeDef.input(j) + + "', which is listed as an input of '" + nodeDef.name() + "'"); + } + ret.push_back(OutputOfConstNodeDef(inputIt->second,outputId.m_Index)); + } + + return ret; +} + +std::vector +TfParser::GetInputParsedTfOperationsChecked(const tensorflow::NodeDef& nodeDef, + std::size_t expectedNumInputs) +{ + // Fetch the tensorflow nodes connected as inputs and validate the size. + std::vector nodes = GetTfInputNodes(nodeDef); + const std::size_t numInputs = nodes.size(); + if (numInputs != expectedNumInputs) + { + throw ParseException(boost::str(boost::format("Unexpected number of inputs for node %1%. 
" + "Expected %2%, found %3%") % nodeDef.name() % expectedNumInputs % numInputs)); + } + // Fetch the corresponding ParsedTfOperation operations + std::vector result; + for (auto&& node : nodes) + { + auto it = m_ParsedTfOperations.find(node.m_IndexedValue->name()); + if (it == m_ParsedTfOperations.end()) + { + throw ParseException("Node with name '" + node.m_IndexedValue->name() + "' has not been parsed"); + } + ParsedTfOperation* parsedOp = it->second.get(); + // Transparently 'skip' any Identity operations. This simplifies the logic inside the ParseXXX() functions. + parsedOp = parsedOp->ResolveIdentityOperations(); + result.push_back(OutputOfParsedTfOperation(parsedOp,node.m_Index)); + } + return result; +} + +ParsedTfOperationPtr TfParser::ParseAdd(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef) +{ + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 2); + + // If one of the inputs is a MatMul and the other is a const, then we handle both nodes together as FullyConnected + if (inputs[0].m_IndexedValue->GetNode().op() == "MatMul" && + HasParsedConstTensor(inputs[1].m_IndexedValue->GetNode().name())) + { + IConnectableLayer* layer = + AddFullyConnectedLayer(inputs[0].m_IndexedValue->GetNode(), + &nodeDef,nodeDef.name().c_str()); + return std::make_unique(this, nodeDef, layer); + } + else if (HasParsedConstTensor(inputs[0].m_IndexedValue->GetNode().name()) && + inputs[1].m_IndexedValue->GetNode().op() == "MatMul") + { + IConnectableLayer* layer = + AddFullyConnectedLayer(inputs[1].m_IndexedValue->GetNode(), + &nodeDef,nodeDef.name().c_str()); + return std::make_unique(this, nodeDef, layer); + } + else + { + // Otherwise it's just a regular addition + return AddAdditionLayer(nodeDef); + } +} + +ParsedTfOperationPtr TfParser::ParseBiasAdd(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef) +{ + return AddAdditionLayer(nodeDef, true); +} + +/// An ParsedTfOperation which forwards to another (used for 
Identity nodes). +class ParsedIdentityTfOperation : public ParsedTfOperation +{ +public: + ParsedIdentityTfOperation(TfParser* parser, const tensorflow::NodeDef& node, ParsedTfOperation* representative) + : ParsedTfOperation(parser, node) + , m_Representative(representative) + { + } + + virtual IOutputSlot& ResolveArmnnOutputSlot(unsigned int tfOutputIndex) override + { + BOOST_ASSERT(m_Representative); + return m_Representative->ResolveArmnnOutputSlot(tfOutputIndex); + } + + virtual ParsedTfOperation* ResolveIdentityOperations() override + { + return m_Representative->ResolveIdentityOperations(); + } + +private: + ParsedTfOperation* m_Representative; +}; + +ParsedTfOperationPtr TfParser::ParseIdentity(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef) +{ + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 1); + // Any requests for the output slots of this node should be forwarded to the node connected as input. + return std::make_unique(this, nodeDef, inputs[0].m_IndexedValue); +} + +/// An ParsedTfOperation for a Const node. +/// Creation of the armnn ConstLayer is deferred until it is actually needed, because Const nodes are mostly used +/// for weight inputs to MatMul/Conv2D nodes and in these cases armnn doesn't need a ConstLayer. 
+template +class ParsedConstTfOperation : public DeferredSingleLayerParsedTfOperation +{ +public: + ParsedConstTfOperation(TfParser* parser, const tensorflow::NodeDef& node, + const T* tensorData, const TensorInfo& tensorInfo) + : DeferredSingleLayerParsedTfOperation(parser, node), + m_Storage(tensorData, tensorData + tensorInfo.GetNumElements()), + m_TensorInfo(tensorInfo) + { + BOOST_ASSERT(tensorInfo.GetDataType() == GetDataType()); + } + + void CreateLayerDeferred() override + { + BOOST_ASSERT(m_Layer == nullptr); + m_Layer = m_Parser->m_Network->AddConstantLayer(ConstTensor(m_TensorInfo, m_Storage), m_Node.name().c_str()); + m_Layer->GetOutputSlot(0).SetTensorInfo(m_TensorInfo); + } + + ConstTensor GetConstTensor(bool swizzleForConvolutionWeights, std::vector& outputTensorData) const + { + // Mappings from TensorFlow filter tensors to the ArmNN filter tensors. + // Tensorflow weights are [H, W, In, Out] + // ArmNN weights are [Out, In, H, W] + static const PermutationVector HWIOToOIHW = {2, 3, 1, 0}; + + const TensorInfo outInfo = swizzleForConvolutionWeights + ? armnnUtils::Permuted(m_TensorInfo, HWIOToOIHW) + : m_TensorInfo; + + outputTensorData.resize(m_TensorInfo.GetNumElements()); + + // Copy or swizzle from the permanent storage into the storage the caller provided. + if (swizzleForConvolutionWeights) + { + armnnUtils::Permute(outInfo.GetShape(), HWIOToOIHW, m_Storage.data(), outputTensorData.data()); + } + else + { + memcpy(outputTensorData.data(), m_Storage.data(), m_TensorInfo.GetNumBytes()); + } + // Update the result to point to the user provided storage + ConstTensor constTensor(outInfo, outputTensorData); + return constTensor; + } + +private: + ///< Manages the lifetime of the tensor data. + std::vector m_Storage; + ///< Describes the layout of the tensor and points to the data in m_Storage. 
+ TensorInfo m_TensorInfo; +}; + +DataType ConvertTfTensorDataType(const tensorflow::DataType tfDataType) +{ + switch (tfDataType) + { + case tensorflow::DT_FLOAT: + return DataType::Float32; + break; + case tensorflow::DT_INT32: + return DataType::Signed32; + break; + default: + throw ParseException(boost::str( + boost::format("Unknown DataType %1% for node") + % tensorflow::DataType_Name(tfDataType))); + } +} + +struct ParseTfTensorValueList +{ + template + static void Parse( + const tensorflow::TensorProto& tfTensor, + unsigned int dstElements, + std::vector& outputData); + + template + static void ReadData(const void* srcData, unsigned int numSrcElements, + std::vector& dstData, unsigned int numDstElements) + { + // If there are no entries in the list, perform no action + if (numSrcElements == 0) + { + return; + } + + // If no size was provided, use the length of the value list + if (numDstElements == 0) + { + numDstElements = numSrcElements; + } + + // Allocate memory + dstData.resize(std::max(numSrcElements, numDstElements) * sizeof(DataType)); + + const DataType* srcTensor = reinterpret_cast(srcData); + DataType* dstTensor = reinterpret_cast(dstData.data()); + + // Copy the value list entries into the destination + std::copy(srcTensor, srcTensor + numSrcElements, dstTensor); + + if (numDstElements > numSrcElements) + { + // Use the last element in the list to fill the remaining entries + std::fill(dstTensor + numSrcElements, dstTensor + numDstElements, srcTensor[numSrcElements - 1]); + } + } + +}; + +template <> +void ParseTfTensorValueList::Parse(const tensorflow::TensorProto& tfTensor, + unsigned int dstElements, std::vector& outputData) +{ + ReadData(tfTensor.float_val().data(), static_cast(tfTensor.float_val_size()), + outputData, dstElements); +} + +template <> +void ParseTfTensorValueList::Parse(const tensorflow::TensorProto& tfTensor, + unsigned int dstElements, std::vector& outputData) +{ + ReadData(tfTensor.int_val().data(), 
static_cast(tfTensor.int_val_size()), + outputData, dstElements); +} + +template class OperatorType, typename T = int8_t> +struct MakeTfOperation +{ + template + inline static std::unique_ptr> Parse(TfParser* parser, const tensorflow::NodeDef& node, + Args&&... args) + { + return std::make_unique>(parser, node, std::forward(args)...); + } +}; + +template <> +struct MakeTfOperation +{ + template + inline static std::unique_ptr> Parse(TfParser* parser, + const tensorflow::NodeDef& node, const std::vector& tensorData, const TensorInfo& tensorInfo) + { + return std::make_unique>(parser, node, + reinterpret_cast(tensorData.data()), tensorInfo); + } +}; + +template +struct InvokeParseFunction +{ + template + inline static ResType Result(DataType dataType, Args&&... args) + { + if (dataType == DataType::Float32) + { + return FuncType::template Parse(std::forward(args)...); + } + else if (dataType == DataType::Signed32) + { + return FuncType::template Parse(std::forward(args)...); + } + + return ResType(); + } + + template + inline static void Result(DataType dataType, Args&&... 
args) + { + if (dataType == DataType::Float32) + { + FuncType::template Parse(std::forward(args)...); + } + else if (dataType == DataType::Signed32) + { + FuncType::template Parse(std::forward(args)...); + } + } +}; + +ParsedTfOperationPtr TfParser::ParseConst(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef) +{ + BOOST_ASSERT(nodeDef.op() == "Const"); + + if (nodeDef.attr().count("value") == 0) + { + throw ParseException(boost::str( + boost::format("Value not found for Const node - %1%") + % nodeDef.name())); + } + + const tensorflow::TensorProto& tfTensor = nodeDef.attr().at("value").tensor(); + const tensorflow::TensorShapeProto& tfTensorShape = tfTensor.tensor_shape(); + const tensorflow::DataType tfDataType = ReadMandatoryNodeTypeAttribute(nodeDef, "dtype"); + + const auto GetDimensionSize = [](auto& d) { return d.size(); }; + + std::vector dimensionSizes; + std::transform(tfTensorShape.dim().begin(), tfTensorShape.dim().end(), + std::back_inserter(dimensionSizes), GetDimensionSize); + + // Calculate number of elements + const DataType dataType = ConvertTfTensorDataType(tfDataType); + unsigned int numElements = 0U; + + if (!dimensionSizes.empty()) + { + numElements = std::accumulate(dimensionSizes.begin(), dimensionSizes.end(), + 1U, std::multiplies()); + } + + std::vector tensorData; + + // Get tensor data from the list of values attribute + if (tfTensor.tensor_content().empty()) + { + InvokeParseFunction::Result(dataType, tfTensor, numElements, tensorData); + + // If the tensor shape is not defined, but there is a value list, then interpret the data as a 1D + // tensor of the provided number of elements + if (numElements == 0) + { + const unsigned int tfNumElements = static_cast(tensorData.size()) / GetDataTypeSize(dataType); + dimensionSizes.push_back(tfNumElements); + } + } + // Get tensor data from tensor content attribute + else + { + tensorData.assign(tfTensor.tensor_content().begin(), tfTensor.tensor_content().end()); + + // 
Check if a tensor shape is defined for the tensor content + if (numElements == 0) + { + throw ParseException(boost::str( + boost::format("No tensor shape found for Const node - %1%") + % nodeDef.name())); + } + } + + // Const node requires at least a list of values or a content attribute + if (tensorData.empty()) + { + throw ParseException(boost::str( + boost::format("No tensor data found for Const node - %1%") + % nodeDef.name())); + } + + const TensorInfo tensorInfo(static_cast(dimensionSizes.size()), dimensionSizes.data(), dataType); + + // If we have a list of values, then the length of the list must be + // less than or equal to the number of elements implied by the shape argument + if (tensorData.size() > tensorInfo.GetNumBytes()) + { + throw ParseException(boost::str( + boost::format("Number of elements (%1%) should be less than or equal \ + to the number of elements implied by the shape argument (%2%) for Const node - %3%") + % (tensorData.size() / GetDataTypeSize(dataType)) + % tensorInfo.GetNumElements() + % nodeDef.name())); + } + + return InvokeParseFunction>::Result( + dataType, this, nodeDef, tensorData, tensorInfo); +} + +template +bool TfParser::HasParsedConstTensor(const std::string & nodeName) const +{ + auto it = m_ParsedTfOperations.find(nodeName); + if (it == m_ParsedTfOperations.end() || + dynamic_cast*>(it->second.get()) == nullptr) + { + return false; + } + else + { + return true; + } +} + +ParsedTfOperationPtr TfParser::ParseConv2D(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 2); + IOutputSlot& inputSlot = inputs[0].m_IndexedValue->ResolveArmnnOutputSlot(inputs[0].m_Index); + TensorInfo inputTensorInfo = inputSlot.GetTensorInfo(); + + if (!HasParsedConstTensor(inputs[1].m_IndexedValue->GetNode().name())) + { + throw ParseException("ArmNN only supports Convolution layers with constant weights"); + } + ParsedConstTfOperation* weightNode = + 
boost::polymorphic_downcast *>(inputs[1].m_IndexedValue); + + std::string paddingString = ReadMandatoryNodeStringAttribute(nodeDef, "padding"); + std::string dataFormat = ReadMandatoryNodeStringAttribute(nodeDef, "data_format"); + std::vector strides = ReadMandatoryNodeUint32ListAttribute(nodeDef, "strides"); + + // read the dilations, if present - only [1,1,1,1] (the default) is supported + std::vector dilations = ReadOptionalNodeUint32ListAttribute(nodeDef, "dilations"); + if (!dilations.empty()) + { + for (auto dilation : dilations) + { + if (dilation != 1u) + { + throw ParseException("ArmNN only supports Convolution layers with dilations [1,1,1,1]"); + } + } + } + + Convolution2dDescriptor desc; + desc.m_BiasEnabled = false; + + if (dataFormat == "NHWC") + { + desc.m_StrideX = strides[2]; + desc.m_StrideY = strides[1]; + // Swizzle input to supported memory layout + inputTensorInfo = armnnUtils::Permuted(inputSlot.GetTensorInfo(), NHWCToArmNN); + } + else if (dataFormat == "NCHW") + { + desc.m_StrideX = strides[3]; + desc.m_StrideY = strides[2]; + } + else + { + throw ParseException("Unsupported data format passed for Conv2D. 
Only NHWC and NCHW supported"); + } + + uint32_t inputHeight = inputTensorInfo.GetShape()[2]; + uint32_t inputWidth = inputTensorInfo.GetShape()[3]; + + std::vector outputTensorData; + + ConstTensor weightTensor = weightNode->GetConstTensor(true, outputTensorData); + + uint32_t weightHeight = weightTensor.GetShape()[2]; + uint32_t weightWidth = weightTensor.GetShape()[3]; + + bool padding = false; + TensorInfo outputInfo; + if (paddingString == "SAME") + { + padding = true; + outputInfo = TensorInfo({ inputTensorInfo.GetShape()[0], + weightTensor.GetShape()[0], + static_cast(ceil( + static_cast(inputHeight) / + static_cast(desc.m_StrideY))), + static_cast(ceil( + static_cast(inputWidth) / + static_cast(desc.m_StrideX))) + }, DataType::Float32); + } + else if (paddingString == "VALID") + { + padding = false; + outputInfo = TensorInfo({ inputTensorInfo.GetShape()[0], + weightTensor.GetShape()[0], + static_cast(ceil( + static_cast(inputHeight - weightHeight + 1) / + static_cast(desc.m_StrideY))), + static_cast(ceil( + static_cast(inputWidth - weightWidth + 1) / + static_cast(desc.m_StrideX))) + }, DataType::Float32); + } + else + { + throw ParseException("Only 'SAME' and 'VALID' padding supported"); + } + + CalcPadding(inputHeight, weightHeight, desc.m_StrideY, desc.m_PadTop, desc.m_PadBottom, padding); + CalcPadding(inputWidth, weightWidth, desc.m_StrideX, desc.m_PadLeft, desc.m_PadRight, padding); + + IConnectableLayer* layer = m_Network->AddConvolution2dLayer(desc, weightTensor, nodeDef.name().c_str()); + layer->GetOutputSlot(0).SetTensorInfo(outputInfo); + + if (dataFormat == "NHWC") + { + layer = SwizzleInDeswizzleOut(*m_Network, inputSlot, *layer, nodeDef.name()); + } + else + { + inputSlot.Connect(layer->GetInputSlot(0)); + } + + return std::make_unique(this, nodeDef, layer); +} + +ParsedTfOperationPtr TfParser::ParseDepthwiseConv2D(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + std::vector inputs = 
GetInputParsedTfOperationsChecked(nodeDef, 2); + IOutputSlot& inputSlot = inputs[0].m_IndexedValue->ResolveArmnnOutputSlot(inputs[0].m_Index); + TensorInfo inputTensorInfo = inputSlot.GetTensorInfo(); + + if (!HasParsedConstTensor(inputs[1].m_IndexedValue->GetNode().name())) + { + throw ParseException("ArmNN only supports Depthwise Convolution layers with constant weights"); + } + ParsedConstTfOperation* weightNode = + boost::polymorphic_downcast *>(inputs[1].m_IndexedValue); + + + std::string paddingString = ReadMandatoryNodeStringAttribute(nodeDef, "padding"); + std::string dataFormat = ReadMandatoryNodeStringAttribute(nodeDef, "data_format"); + std::vector strides = ReadMandatoryNodeUint32ListAttribute(nodeDef, "strides"); + + DepthwiseConvolution2dDescriptor desc; + desc.m_BiasEnabled = false; + + if (dataFormat == "NHWC") + { + desc.m_StrideX = strides[2]; + desc.m_StrideY = strides[1]; + // Swizzle input to supported memory layout + inputTensorInfo = armnnUtils::Permuted(inputSlot.GetTensorInfo(), NHWCToArmNN); + } + else if (dataFormat == "NCHW") + { + desc.m_StrideX = strides[3]; + desc.m_StrideY = strides[2]; + } + else + { + throw ParseException("Unsupported data format passed for DepthwiseConv2dNative. 
Only NHWC and NCHW supported"); + } + + uint32_t inputHeight = inputTensorInfo.GetShape()[2]; + uint32_t inputWidth = inputTensorInfo.GetShape()[3]; + + std::vector outputTensorData; + + ConstTensor weightTensor = weightNode->GetConstTensor(true, outputTensorData); + + uint32_t weightHeight = weightTensor.GetShape()[2]; + uint32_t weightWidth = weightTensor.GetShape()[3]; + + bool padding = false; + TensorInfo outputInfo; + if (paddingString == "SAME") + { + padding = true; + outputInfo = TensorInfo({ inputTensorInfo.GetShape()[0], + weightTensor.GetShape()[0] * weightTensor.GetShape()[1], + static_cast(ceil( + static_cast(inputHeight) / + static_cast(desc.m_StrideY))), + static_cast(ceil( + static_cast(inputWidth) / + static_cast(desc.m_StrideX))) + }, DataType::Float32); + } + else if (paddingString == "VALID") + { + padding = false; + outputInfo = TensorInfo({ inputTensorInfo.GetShape()[0], + weightTensor.GetShape()[0] * weightTensor.GetShape()[1], + static_cast(ceil( + static_cast(inputHeight - weightHeight + 1) / + static_cast(desc.m_StrideY))), + static_cast(ceil( + static_cast(inputWidth - weightWidth + 1) / + static_cast(desc.m_StrideX))) + }, DataType::Float32); + } + else + { + throw ParseException("Only 'SAME' and 'VALID' padding supported"); + } + + CalcPadding(inputHeight, weightHeight, desc.m_StrideY, desc.m_PadTop, desc.m_PadBottom, padding); + CalcPadding(inputWidth, weightWidth, desc.m_StrideX, desc.m_PadLeft, desc.m_PadRight, padding); + + IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(desc, weightTensor, nodeDef.name().c_str()); + layer->GetOutputSlot(0).SetTensorInfo(outputInfo); + + if (dataFormat == "NHWC") + { + layer = SwizzleInDeswizzleOut(*m_Network, inputSlot, *layer, nodeDef.name()); + } + else + { + inputSlot.Connect(layer->GetInputSlot(0)); + } + + return std::make_unique(this, nodeDef, layer); +} + +ParsedTfOperationPtr TfParser::ParseFusedBatchNorm(const tensorflow::NodeDef& nodeDef, + const 
tensorflow::GraphDef& graphDef) +{ + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 5); + + if (!HasParsedConstTensor(inputs[1].m_IndexedValue->GetNode().name())) + { + throw ParseException("ArmNN only supports FusedBatchNormalization layers with constant scale"); + } + ParsedConstTfOperation* scaleNode = + boost::polymorphic_downcast *>(inputs[1].m_IndexedValue); + + if (!HasParsedConstTensor(inputs[2].m_IndexedValue->GetNode().name())) + { + throw ParseException("ArmNN only supports FusedBatchNormalization layers with constant offset"); + } + ParsedConstTfOperation* offsetNode = + boost::polymorphic_downcast *>(inputs[2].m_IndexedValue); + + if (!HasParsedConstTensor(inputs[3].m_IndexedValue->GetNode().name())) + { + throw ParseException("ArmNN only supports FusedBatchNormalization layers with constant mean"); + } + ParsedConstTfOperation* meanNode = + boost::polymorphic_downcast *>(inputs[3].m_IndexedValue); + + if (!HasParsedConstTensor(inputs[4].m_IndexedValue->GetNode().name())) + { + throw ParseException("ArmNN only supports FusedBatchNormalization layers with constant variance"); + } + ParsedConstTfOperation* varianceNode = + boost::polymorphic_downcast *>(inputs[4].m_IndexedValue); + + // The descriptor only has the epsilon attribute + BatchNormalizationDescriptor desc; + desc.m_Eps = ReadMandatoryNodeFloatAttribute(nodeDef, "epsilon"); + + // data for the parsed tensor args (scale, offset, mean, variance) must be stored locally until the layer is added + std::vector scaleTensorData; + ConstTensor scaleTensor = scaleNode->GetConstTensor(false, scaleTensorData); + + std::vector offsetTensorData; + ConstTensor offsetTensor = offsetNode->GetConstTensor(false, offsetTensorData); + + std::vector meanTensorData; + ConstTensor meanTensor = meanNode->GetConstTensor(false, meanTensorData); + + std::vector varianceTensorData; + ConstTensor varianceTensor = varianceNode->GetConstTensor(false, varianceTensorData); + + IConnectableLayer* layer = 
m_Network->AddBatchNormalizationLayer(desc, + meanTensor, + varianceTensor, + offsetTensor, + scaleTensor, + nodeDef.name().c_str()); + + IOutputSlot& inputSlot = inputs[0].m_IndexedValue->ResolveArmnnOutputSlot(inputs[0].m_Index); + + const std::string dataFormat = ReadMandatoryNodeStringAttribute(nodeDef, "data_format"); + + if (dataFormat == "NHWC") + { + const TensorInfo outputTensorInfo = armnnUtils::Permuted(inputSlot.GetTensorInfo(), NHWCToArmNN); + layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo); + layer = SwizzleInDeswizzleOut(*m_Network, inputSlot, *layer, nodeDef.name()); + } + else + { + layer->GetOutputSlot(0).SetTensorInfo(inputSlot.GetTensorInfo()); + inputSlot.Connect(layer->GetInputSlot(0)); + } + + return std::make_unique(this, nodeDef, layer); +} + +ParsedTfOperationPtr TfParser::ParseConcat(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + std::vector nodes = GetTfInputNodes(nodeDef); + // In tensorflow, we have the last input of the Concat layer as the axis for concatenation + unsigned int numInputs = static_cast(nodes.size()); + unsigned int numConcatView = numInputs - 1; + + OriginsDescriptor concatDescriptor(static_cast(numConcatView), MaxNumOfTensorDimensions); + std::vectormergeDimSizes(MaxNumOfTensorDimensions, 0u); + + unsigned int mergeDim = 0; + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, numInputs); + + // The last input is the axis for concatenation + if (!HasParsedConstTensor(inputs[numInputs - 1].m_IndexedValue->GetNode().name())) + { + throw ParseException("ArmNN only supports Concat with constant axis"); + } + ParsedConstTfOperation* shapeNode = + boost::polymorphic_downcast*>(inputs[numInputs - 1].m_IndexedValue); + + std::vector axisTensorData; + ConstTensor axisTensor = shapeNode->GetConstTensor(false, axisTensorData); + + // This concatDim indicates the data format: 3 is the NHWC, 1 is the NCHW + const unsigned int concatDimInput = static_cast(axisTensorData[0]); + + 
// Armnn supports concatenation along the channel dimension for data format NHWC and NCHW + if (concatDimInput == 0 || concatDimInput == 2) + { + throw ParseException("The dimension for concatenation is not supported by Armnn"); + } + + // This is the only concatDim we support in Armnn + const unsigned int concatDim = 1; + for (unsigned int viewIndex = 0; viewIndex < numConcatView; ++viewIndex) + { + // need to double check whether it should be + IOutputSlot& inputSlot = + inputs[viewIndex].m_IndexedValue->ResolveArmnnOutputSlot(inputs[viewIndex].m_Index); + TensorInfo inputTensorInfo = inputSlot.GetTensorInfo(); + + if (inputTensorInfo.GetNumDimensions() != MaxNumOfTensorDimensions) + { + throw ParseException("The number of dimensions for input tensors of the concatenation op should be 4"); + } + + if (concatDimInput == 3) + { + inputTensorInfo = armnnUtils::Permuted(inputTensorInfo, NHWCToArmNN); + } + + for (unsigned int dim = 0; dim < MaxNumOfTensorDimensions; ++dim) + { + mergeDimSizes[dim] = inputTensorInfo.GetShape()[dim]; + } + + for (unsigned int j = 0; j < concatDim; ++j) + { + concatDescriptor.SetViewOriginCoord(viewIndex, j, 0); + } + + concatDescriptor.SetViewOriginCoord(viewIndex, concatDim, mergeDim); + mergeDim += mergeDimSizes[concatDim]; + + for (unsigned int j = concatDim+1; j < MaxNumOfTensorDimensions; ++j) + { + concatDescriptor.SetViewOriginCoord(viewIndex, j, 0); + } + } + + mergeDimSizes[concatDim] = mergeDim; + armnn::IConnectableLayer *layer = m_Network->AddMergerLayer(concatDescriptor, nodeDef.name().c_str()); + + layer->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo(MaxNumOfTensorDimensions, mergeDimSizes.data(), + DataType::Float32)); + + for (unsigned int v = 0; v < numConcatView; ++v) + { + IOutputSlot& inputSlot = inputs[v].m_IndexedValue->ResolveArmnnOutputSlot(inputs[v].m_Index); + if (concatDimInput == 3) + { + IConnectableLayer* const swizzleLayer = AddSwizzleLayer(*m_Network, inputSlot, NHWCToArmNN, + "swizzle_for-" + 
nodeDef.name()); + swizzleLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(v)); + } + else + { + inputSlot.Connect(layer->GetInputSlot(v)); + } + } + + if (concatDimInput == 3) + { + IConnectableLayer* const deswizzleLayer = AddSwizzleLayer(*m_Network, layer->GetOutputSlot(0), ArmNNToNHWC, + "deswizzle_for-" + nodeDef.name()); + layer = deswizzleLayer; + } + + return std::make_unique(this, nodeDef, layer); +} + +ParsedTfOperationPtr TfParser::ParseShape(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + // Note: The Shape layer is handled in a special way, because: + // 1. ARMNN doesn't support int32 tensors which it outputs + // 2. ARMNN works with statically shaped tensors which are known at parse time + // 3. because of 1. and 2. we treat the output of Shape as a temporary const int32 + // tensor which may be used as an input to other ops, most likely a Reshape + + const tensorflow::DataType tfDataType = ReadMandatoryNodeTypeAttribute(nodeDef, "out_type"); + if (tfDataType != tensorflow::DT_INT32) + { + throw ParseException("Armnn only supports DT_INT32 as out_type"); + } + + const std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 1); + IOutputSlot& prevLayerOutputSlot = inputs[0].m_IndexedValue->ResolveArmnnOutputSlot(inputs[0].m_Index); + const TensorInfo& prevLayerTensorInfo = prevLayerOutputSlot.GetTensorInfo(); + unsigned int prevLayerDimensions = prevLayerTensorInfo.GetNumDimensions(); + + std::vector shapeTensorData; + shapeTensorData.reserve(prevLayerDimensions); + + for (unsigned int i=0; i(prevLayerTensorInfo.GetShape()[i])); + } + + TensorInfo shapeTensorInfo(1, &prevLayerDimensions, DataType::Signed32); + + return std::make_unique>(this, + nodeDef, + &shapeTensorData[0], + shapeTensorInfo); +} + +ParsedTfOperationPtr TfParser::ParseReshape(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 2); + 
ParsedTfOperation* inputNode = inputs[0].m_IndexedValue; + + if (!HasParsedConstTensor(inputs[1].m_IndexedValue->GetNode().name())) + { + throw ParseException("ArmNN only supports Reshape layers with constant shapes"); + } + ParsedConstTfOperation* shapeNode = + boost::polymorphic_downcast*>(inputs[1].m_IndexedValue); + + armnn::IOutputSlot& prevLayerOutputSlot = inputNode->ResolveArmnnOutputSlot(inputs[0].m_Index); + TensorInfo inputTensorInfo = prevLayerOutputSlot.GetTensorInfo(); + + std::vector shapeTensorData; + ConstTensor shapeTensor = shapeNode->GetConstTensor(false, shapeTensorData); + const TensorInfo outputTensorInfo = PrepareReshape(inputTensorInfo, shapeTensorData); + + TensorShape targetShape = outputTensorInfo.GetShape(); + ReshapeDescriptor reshapeDesc; + reshapeDesc.m_TargetShape = targetShape; + + IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, nodeDef.name().c_str()); + prevLayerOutputSlot.Connect(layer->GetInputSlot(0)); + layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo); + + return std::make_unique(this, nodeDef, layer); +} + +ParsedTfOperationPtr TfParser::ParseResizeBilinear(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 2); + + if (!HasParsedConstTensor(inputs[1].m_IndexedValue->GetNode().name())) + { + throw ParseException("ArmNN only supports ResizeBilinear layers with constant sizes"); + } + ParsedConstTfOperation* sizeNode = + boost::polymorphic_downcast*>(inputs[1].m_IndexedValue); + + // Check the align_corners attribute is not set + if (ReadOptionalNodeBoolAttribute(nodeDef, "align_corners", false)) + { + throw ParseException("ArmNN only supports ResizeBilinear layers with align_corners set to false"); + } + + // data for the parsed tensor args (size) must be stored locally + std::vector sizeTensorData; + ConstTensor sizeTensor = sizeNode->GetConstTensor(false, sizeTensorData); + + // The descriptor only has 
target height and width attributes, which we get from the size tensor + ResizeBilinearDescriptor desc; + desc.m_TargetHeight = static_cast (sizeTensorData[0]); + desc.m_TargetWidth = static_cast (sizeTensorData[1]); + + IConnectableLayer* layer = m_Network->AddResizeBilinearLayer(desc, nodeDef.name().c_str()); + + IOutputSlot& inputSlot = inputs[0].m_IndexedValue->ResolveArmnnOutputSlot(inputs[0].m_Index); + TensorInfo inputTensorInfo = inputSlot.GetTensorInfo(); + // the input shape is always in BHWC format, this will be swizzled below; for now, + // get the batch and channels to make up the ArmNN output shape with the target size + unsigned int outBatch = inputTensorInfo.GetShape()[0]; + unsigned int outChannels = inputTensorInfo.GetShape()[3]; + unsigned int outHeight = desc.m_TargetHeight; + unsigned int outWidth = desc.m_TargetWidth; + TensorShape outShape({outBatch, outChannels, outHeight, outWidth}); + // The output DataType is always Float32, regardless of the input DataType + const TensorInfo outputTensorInfo(outShape, armnn::DataType::Float32); + layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo); + + // TensorFlow ResizeBilinear input is always in BHWC format, so add swizzle and deswizzle layers + layer = SwizzleInDeswizzleOut(*m_Network, inputSlot, *layer, nodeDef.name()); + + return std::make_unique(this, nodeDef, layer); +} + +TensorInfo OutputShapeOfSqueeze(const tensorflow::NodeDef& nodeDef, TensorInfo inputTensorInfo) +{ + BOOST_ASSERT(nodeDef.op() == "Squeeze"); + tensorflow::DataType tfDataType = ReadMandatoryNodeTypeAttribute(nodeDef, "T"); + + DataType type; + if (tfDataType == tensorflow::DT_FLOAT) + { + type = DataType::Float32; + } + else if (tfDataType == tensorflow::DT_INT32) + { + type = DataType::Signed32; + } + else + { + throw ParseException(boost::str( + boost::format("Unsupported DataType %1% for Squeeze operation") + % tensorflow::DataType_Name(tfDataType))); + } + + std::vector squeezeDims = 
ReadOptionalNodeUint32ListAttribute(nodeDef, "squeeze_dims"); + if (squeezeDims.empty()) + { + for(unsigned int i = 0; i < inputTensorInfo.GetNumDimensions(); i++) + { + if (inputTensorInfo.GetShape()[i] == 1) + { + squeezeDims.push_back(i); + } + } + } + + std::vector outputDims; + for(unsigned int i = 0; i < inputTensorInfo.GetNumDimensions(); i++) + { + bool includeDimension = (std::find(squeezeDims.begin(), squeezeDims.end(), i) == squeezeDims.end()); + if (includeDimension) + { + outputDims.push_back(inputTensorInfo.GetShape()[i]); + } + } + + if (outputDims.size() > 4) + { + throw ParseException("Unsupported shape for Squeeze"); + } + + TensorInfo outTensorInfo = TensorInfo(boost::numeric_cast(outputDims.size()), + outputDims.data(), + type); + + return outTensorInfo; +} + +ParsedTfOperationPtr TfParser::ParseSqueeze(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef) +{ + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 1); + + IOutputSlot& prevLayerOutputSlot = inputs[0].m_IndexedValue->ResolveArmnnOutputSlot(inputs[0].m_Index); + TensorInfo inputTensorInfo = prevLayerOutputSlot.GetTensorInfo(); + + TensorInfo outputInfo; + outputInfo = OutputShapeOfSqueeze(nodeDef, inputTensorInfo); + + ReshapeDescriptor reshapeDesc; + reshapeDesc.m_TargetShape = outputInfo.GetShape(); + IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, nodeDef.name().c_str()); + prevLayerOutputSlot.Connect(layer->GetInputSlot(0)); + layer->GetOutputSlot(0).SetTensorInfo(outputInfo); + + return std::make_unique(this, nodeDef, layer); +} + +ParsedTfOperationPtr TfParser::ParseLrn(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef) +{ + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 1); + + NormalizationDescriptor normalizationDescriptor; + normalizationDescriptor.m_NormMethodType = NormalizationAlgorithmMethod::LocalBrightness; + normalizationDescriptor.m_NormChannelType = 
NormalizationAlgorithmChannel::Across; + normalizationDescriptor.m_Alpha = ReadMandatoryNodeFloatAttribute(nodeDef, "alpha"); + normalizationDescriptor.m_Beta = ReadMandatoryNodeFloatAttribute(nodeDef, "beta"); + normalizationDescriptor.m_K = ReadMandatoryNodeFloatAttribute(nodeDef, "bias"); + normalizationDescriptor.m_NormSize = ReadMandatoryNodeUint32Attribute(nodeDef, "depth_radius"); + + // The window size must be an odd value. For a window size of (2 * n + 1), TensorFlow defines depth_radius = n. + normalizationDescriptor.m_NormSize = normalizationDescriptor.m_NormSize * 2 + 1; + + IOutputSlot& prevLayerOutputSlot = inputs[0].m_IndexedValue->ResolveArmnnOutputSlot(inputs[0].m_Index); + + IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, + nodeDef.name().c_str()); + + const TensorInfo permutedInfo = armnnUtils::Permuted(prevLayerOutputSlot.GetTensorInfo(), NHWCToArmNN); + layer->GetOutputSlot(0).SetTensorInfo(permutedInfo); + + layer = SwizzleInDeswizzleOut(*m_Network, prevLayerOutputSlot, *layer, nodeDef.name()); + + return std::make_unique(this, nodeDef, layer); +} + +/// An ParsedTfOperation for a MatMul node. +/// Creation of the armnn FullyConnected layer is deferred until it is actually needed, because MatMul nodes are +/// often used for the first part of a biased FullyConnected (MatMul followed by Add) and in these cases armnn doesn't +/// need a separate layer for the MatMul. 
+class ParsedMatMulTfOperation : public DeferredSingleLayerParsedTfOperation +{ +public: + ParsedMatMulTfOperation(TfParser* parser, const tensorflow::NodeDef& node) + : DeferredSingleLayerParsedTfOperation(parser, node) + { + } + + void CreateLayerDeferred() override + { + BOOST_ASSERT(m_Layer == nullptr); + m_Layer = m_Parser->AddFullyConnectedLayer(m_Node, nullptr, m_Node.name().c_str()); + } +}; + +ParsedTfOperationPtr TfParser::ParseMatMul(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef) +{ + // Defer the creation of the layer (see ParsedMatMulTfOperation). + return std::make_unique(this, nodeDef); +} + +ParsedTfOperationPtr TfParser::ParseMul(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef) +{ + boost::ignore_unused(graphDef); + + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 2); + + IConnectableLayer* const layer = m_Network->AddMultiplicationLayer(nodeDef.name().c_str()); + IOutputSlot* input0Slot = &inputs[0].m_IndexedValue->ResolveArmnnOutputSlot(inputs[0].m_Index); + IOutputSlot* input1Slot = &inputs[1].m_IndexedValue->ResolveArmnnOutputSlot(inputs[1].m_Index); + + auto const input0NumDims = input0Slot->GetTensorInfo().GetNumDimensions(); + auto const input1NumDims = input1Slot->GetTensorInfo().GetNumDimensions(); + + if (input0NumDims < input1NumDims) + { + const bool isNHWC = true; + input0Slot = BroadcastForAddandMul(input1Slot, input0Slot, isNHWC, *m_Network, nodeDef); + } + if (input1NumDims < input0NumDims) + { + const bool isNHWC = true; + input1Slot = BroadcastForAddandMul(input0Slot, input1Slot, isNHWC, *m_Network, nodeDef); + } + + input0Slot->Connect(layer->GetInputSlot(0)); + input1Slot->Connect(layer->GetInputSlot(1)); + + if (input0NumDims < input1NumDims) + { + layer->GetOutputSlot(0).SetTensorInfo(input1Slot->GetTensorInfo()); + } + else + { + layer->GetOutputSlot(0).SetTensorInfo(input0Slot->GetTensorInfo()); + } + return std::make_unique(this, nodeDef, layer); +} + 
+ParsedTfOperationPtr TfParser::ParsePlaceholder(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + boost::ignore_unused(graphDef); + + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 0); + + const LayerBindingId layerId = boost::numeric_cast(m_NetworkInputsBindingInfo.size()); + + auto it = m_InputShapes.find(nodeDef.name()); + if (it == m_InputShapes.end()) + { + throw ParseException("Missing input shape for Placeholder '" + nodeDef.name() + "'"); + } + TensorInfo tensorInfo(it->second, DataType::Float32); + + IConnectableLayer* const layer = m_Network->AddInputLayer(layerId, nodeDef.name().c_str()); + + layer->GetOutputSlot(0).SetTensorInfo(tensorInfo); + + TrackInputBinding(layer, layerId, tensorInfo); + + return std::make_unique(this, nodeDef, layer); +} + +ParsedTfOperationPtr TfParser::ParseRelu(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + boost::ignore_unused(graphDef); + + ActivationDescriptor activationDesc; + activationDesc.m_Function = ActivationFunction::ReLu; + return AddActivationLayer(nodeDef, activationDesc); +} + +ParsedTfOperationPtr TfParser::ParseRelu6(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + boost::ignore_unused(graphDef); + + ActivationDescriptor activationDesc; + activationDesc.m_Function = ActivationFunction::BoundedReLu; + activationDesc.m_A = 6.0f; + activationDesc.m_B = 0.0f; + + return AddActivationLayer(nodeDef, activationDesc); +} + +ParsedTfOperationPtr TfParser::ParseSigmoid(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + boost::ignore_unused(graphDef); + + ActivationDescriptor activationDesc; + activationDesc.m_Function = ActivationFunction::Sigmoid; + + return AddActivationLayer(nodeDef, activationDesc); +} + +ParsedTfOperationPtr TfParser::ParseSoftmax(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + boost::ignore_unused(graphDef); + + 
std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 1); + + SoftmaxDescriptor softmaxDescriptor; + IConnectableLayer* const layer = m_Network->AddSoftmaxLayer(softmaxDescriptor, nodeDef.name().c_str()); + + IOutputSlot& prevLayerSlot = inputs[0].m_IndexedValue->ResolveArmnnOutputSlot(inputs[0].m_Index); + prevLayerSlot.Connect(layer->GetInputSlot(0)); + layer->GetOutputSlot(0).SetTensorInfo(prevLayerSlot.GetTensorInfo()); + + return std::make_unique(this, nodeDef, layer); +} + +ParsedTfOperationPtr TfParser::ParseSoftplus(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + boost::ignore_unused(graphDef); + + ActivationDescriptor activationDesc; + activationDesc.m_Function = ActivationFunction::SoftReLu; + + return AddActivationLayer(nodeDef, activationDesc); +} + +ParsedTfOperationPtr TfParser::ParseTanh(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef) +{ + boost::ignore_unused(graphDef); + + ActivationDescriptor activationDesc; + activationDesc.m_Function = ActivationFunction::TanH; + activationDesc.m_A = 1.0f; + activationDesc.m_B = 1.0f; + + return AddActivationLayer(nodeDef, activationDesc); +} + +ParsedTfOperationPtr TfParser::AddActivationLayer(const tensorflow::NodeDef& nodeDef, + ActivationDescriptor& activationDesc) +{ + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 1); + + IConnectableLayer* const layer = m_Network->AddActivationLayer(activationDesc, nodeDef.name().c_str()); + + IOutputSlot& prevLayerOutputSlot = inputs[0].m_IndexedValue->ResolveArmnnOutputSlot(inputs[0].m_Index); + prevLayerOutputSlot.Connect(layer->GetInputSlot(0)); + layer->GetOutputSlot(0).SetTensorInfo(prevLayerOutputSlot.GetTensorInfo()); + return std::make_unique(this, nodeDef, layer); +} + +ParsedTfOperationPtr TfParser::ParseMaxPool(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + return ParsePooling2d(nodeDef, graphDef, PoolingAlgorithm::Max); +} + 
+ParsedTfOperationPtr TfParser::ParseAvgPool(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef) +{ + return ParsePooling2d(nodeDef, graphDef, PoolingAlgorithm::Average); +} + +ParsedTfOperationPtr TfParser::ParsePooling2d(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef, PoolingAlgorithm pooltype) +{ + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 1); + IOutputSlot& inputSlot = inputs[0].m_IndexedValue->ResolveArmnnOutputSlot(inputs[0].m_Index); + TensorInfo inputTensorInfo = inputSlot.GetTensorInfo(); + + if (inputs.size() != 1) + { + throw ParseException("2D Pooling expects one input!"); + } + + std::string paddingString = ReadMandatoryNodeStringAttribute(nodeDef, "padding"); + std::string dataFormat = ReadMandatoryNodeStringAttribute(nodeDef, "data_format"); + std::vector strides = ReadMandatoryNodeUint32ListAttribute(nodeDef, "strides"); + std::vector ksize = ReadMandatoryNodeUint32ListAttribute(nodeDef, "ksize"); // size of pool windows + + Pooling2dDescriptor pooling2dDescriptor; + pooling2dDescriptor.m_PoolType = pooltype; + pooling2dDescriptor.m_PaddingMethod = PaddingMethod::Exclude; + pooling2dDescriptor.m_OutputShapeRounding = OutputShapeRounding::Floor; + + if (dataFormat == "NHWC") + { + pooling2dDescriptor.m_StrideX = strides[2]; + pooling2dDescriptor.m_StrideY = strides[1]; + pooling2dDescriptor.m_PoolWidth = ksize[2]; + pooling2dDescriptor.m_PoolHeight = ksize[1]; + // Swizzle input to supported memory layout + inputTensorInfo = armnnUtils::Permuted(inputSlot.GetTensorInfo(), NHWCToArmNN); + } + else if (dataFormat == "NCHW") + { + pooling2dDescriptor.m_StrideX = strides[3]; + pooling2dDescriptor.m_StrideY = strides[2]; + pooling2dDescriptor.m_PoolWidth = ksize[3]; + pooling2dDescriptor.m_PoolHeight = ksize[2]; + } + else + { + throw ParseException("Only NHWC or NCHW supported for Pooling2d"); + } + + uint32_t inputHeight = inputTensorInfo.GetShape()[2]; + uint32_t inputWidth = 
inputTensorInfo.GetShape()[3]; + + bool padding = false; + TensorInfo outputInfo; + if (paddingString == "SAME") + { + padding = true; + outputInfo = TensorInfo({ inputTensorInfo.GetShape()[0], + inputTensorInfo.GetShape()[1], + static_cast(ceil( + static_cast(inputHeight) / + static_cast(pooling2dDescriptor.m_StrideY))), + static_cast(ceil( + static_cast(inputWidth) / + static_cast(pooling2dDescriptor.m_StrideX))) + }, DataType::Float32); + } + else if (paddingString == "VALID") + { + padding = false; + outputInfo = TensorInfo({ inputTensorInfo.GetShape()[0], + inputTensorInfo.GetShape()[1], + static_cast(ceil( + static_cast(inputHeight - pooling2dDescriptor.m_PoolHeight + 1) / + static_cast(pooling2dDescriptor.m_StrideY))), + static_cast(ceil( + static_cast(inputWidth - pooling2dDescriptor.m_PoolWidth + 1) / + static_cast(pooling2dDescriptor.m_StrideX))) + }, DataType::Float32); + } + else + { + throw ParseException("Only 'SAME' and 'VALID' padding supported"); + } + + CalcPadding(inputWidth, pooling2dDescriptor.m_PoolWidth, pooling2dDescriptor.m_StrideX, + pooling2dDescriptor.m_PadLeft, pooling2dDescriptor.m_PadRight, padding); + CalcPadding(inputHeight, pooling2dDescriptor.m_PoolHeight, pooling2dDescriptor.m_StrideY, + pooling2dDescriptor.m_PadTop, pooling2dDescriptor.m_PadBottom, padding); + + + IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, nodeDef.name().c_str()); + if (layer == nullptr) + { + throw ParseException("Failed to add pooling2d layer"); + } + + layer->GetOutputSlot(0).SetTensorInfo(outputInfo); + + if (dataFormat == "NHWC") + { + layer = SwizzleInDeswizzleOut(*m_Network, inputSlot, *layer, nodeDef.name()); + } + else + { + inputSlot.Connect(layer->GetInputSlot(0)); + } + + return std::make_unique(this, nodeDef, layer); +} + +ParsedTfOperationPtr TfParser::AddAdditionLayer(const tensorflow::NodeDef& nodeDef, bool isBiasAdd) +{ + std::vector inputs = GetInputParsedTfOperationsChecked(nodeDef, 2); + + IOutputSlot* 
input0Slot = &inputs[0].m_IndexedValue->ResolveArmnnOutputSlot(inputs[0].m_Index); + IOutputSlot* input1Slot = &inputs[1].m_IndexedValue->ResolveArmnnOutputSlot(inputs[1].m_Index); + + const TensorInfo& input0Info = input0Slot->GetTensorInfo(); + const TensorInfo& input1Info = input1Slot->GetTensorInfo(); + + if (isBiasAdd) + { + // BiasAdd takes bias as a 1D tensor. We need to add a reshape layer to create a 4D tensor + // with the same data in the correct dimension for broadcast in addition. + if(input1Info.GetNumDimensions() != 1) + { + throw ParseException("Unsupported bias for BiasAdd. It should be a 1D vector."); + } + + const std::string dataFormat = ReadMandatoryNodeStringAttribute(nodeDef, "data_format"); + const bool isNHWC = (dataFormat == "NHWC"); + const bool isNCHW = (dataFormat == "NCHW"); + + if (!isNHWC && ! isNCHW) + { + throw ParseException("Only NHWC or NCHW supported for BiasAdd"); + } + + input1Slot = BroadcastForAddandMul(input0Slot, input1Slot, isNHWC, *m_Network, nodeDef); + } + else + { + if (input0Info.GetNumDimensions() == 1) + { + const bool isNHWC = true; + input0Slot = BroadcastForAddandMul(input1Slot, input0Slot, isNHWC, *m_Network, nodeDef); + } + + if (input1Info.GetNumDimensions() == 1) + { + const bool isNHWC = true; + input1Slot = BroadcastForAddandMul(input0Slot, input1Slot, isNHWC, *m_Network, nodeDef); + } + } + + IConnectableLayer* const layer = m_Network->AddAdditionLayer(nodeDef.name().c_str()); + + input0Slot->Connect(layer->GetInputSlot(0)); + input1Slot->Connect(layer->GetInputSlot(1)); + + if (input0Info.GetNumDimensions() == 1 && isBiasAdd == false) + { + layer->GetOutputSlot(0).SetTensorInfo(input1Slot->GetTensorInfo()); + } + else + { + layer->GetOutputSlot(0).SetTensorInfo(input0Slot->GetTensorInfo()); + } + + return std::make_unique(this, nodeDef, layer); +} + +IConnectableLayer* TfParser::AddFullyConnectedLayer(const tensorflow::NodeDef& matMulNodeDef, + const tensorflow::NodeDef* addNodeDef, const char* 
armnnLayerName) +{ + // find bias const (if applicable) + ParsedConstTfOperation* biasNode = nullptr; + if (addNodeDef != nullptr) + { + std::vector addInputs = GetInputParsedTfOperationsChecked(*addNodeDef, 2); + // find our inputs + if (HasParsedConstTensor(addInputs[0].m_IndexedValue->GetNode().name())) + { + biasNode = boost::polymorphic_downcast*>(addInputs[0].m_IndexedValue); + } + else if (HasParsedConstTensor(addInputs[1].m_IndexedValue->GetNode().name())) + { + biasNode = boost::polymorphic_downcast*>(addInputs[1].m_IndexedValue); + } + else + { + throw ParseException("ArmNN only supports fully connected layers with constant bias"); + } + } + + // find matmul inputs + ParsedConstTfOperation* weightNode = nullptr; + ParsedTfOperation* inputNode = nullptr; + unsigned int inputIdx = 0; + std::vector mulInputs = GetInputParsedTfOperationsChecked(matMulNodeDef, 2); + if (HasParsedConstTensor(mulInputs[0].m_IndexedValue->GetNode().name())) + { + weightNode = boost::polymorphic_downcast*>(mulInputs[0].m_IndexedValue); + inputNode = mulInputs[1].m_IndexedValue; + inputIdx = mulInputs[1].m_Index; + } + else if (HasParsedConstTensor(mulInputs[1].m_IndexedValue->GetNode().name())) + { + weightNode = boost::polymorphic_downcast*>(mulInputs[1].m_IndexedValue); + inputNode = mulInputs[0].m_IndexedValue; + inputIdx = mulInputs[0].m_Index; + } + else + { + throw ParseException("ArmNN only supports fully connected layers with constant weights"); + } + + std::vector weightTensorData; + // handle weight + ConstTensor weights = weightNode->GetConstTensor(false, weightTensorData); + + FullyConnectedDescriptor desc; + desc.m_BiasEnabled = addNodeDef != nullptr; + + IConnectableLayer* layer = nullptr; + // make the layer + if (addNodeDef != nullptr) + { + std::vector biasTensorData; + ConstTensor biases = biasNode->GetConstTensor(false, biasTensorData); + + if (weights.GetShape()[1] != biases.GetShape()[0]) + { + throw ParseException("shape of matmul and bias do not match"); + } 
+ + layer = m_Network->AddFullyConnectedLayer(desc, weights, biases, armnnLayerName); + } + else + { + layer = m_Network->AddFullyConnectedLayer(desc, weights, armnnLayerName); + } + + BOOST_ASSERT(layer != nullptr); + + inputNode->ResolveArmnnOutputSlot(inputIdx).Connect(layer->GetInputSlot(0)); + unsigned int batches = inputNode->ResolveArmnnOutputSlot(inputIdx).GetTensorInfo().GetShape()[0]; + + // handle output + TensorInfo outputInfo({ batches, weights.GetShape()[1] }, DataType::Float32); + layer->GetOutputSlot(0).SetTensorInfo(outputInfo); + return layer; +} + +void TfParser::LoadNodeDef(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef) +{ + // get the type of the node (assume float) + tensorflow::DataType type = tensorflow::DT_FLOAT; + if (nodeDef.attr().count("T") != 0) + { + auto attr = nodeDef.attr().at("T"); + type = attr.type(); + } + else if (nodeDef.attr().count("dtype") != 0) + { + auto attr = nodeDef.attr().at("dtype"); + type = attr.type(); + } + + if (type != tensorflow::DT_FLOAT && nodeDef.op() != "Const") + { + throw ParseException("Currently only FLOAT is supported for tensorflow nodes (apart from Const)"); + } + + const std::string& operation = nodeDef.op(); + auto it = ms_OperationNameToParsingFunctions.find(operation); + if (it != ms_OperationNameToParsingFunctions.end()) + { + auto func = it->second; + ParsedTfOperationPtr parsedTfOperation = (this->*func)(nodeDef, graphDef); + ParsedTfOperation* parsedTfOperationRaw = parsedTfOperation.get(); + + // Store the parsed operation so that dependent layers can connect to it + auto it = m_ParsedTfOperations.find(nodeDef.name()); + if (it != m_ParsedTfOperations.end()) + { + throw ParseException(boost::str(boost::format("Name %1% used by more than one node") % nodeDef.name())); + } + m_ParsedTfOperations[nodeDef.name()] = std::move(parsedTfOperation); + + // If this node was requested as an output from the network then add an ArmNN output layer + if 
(std::find(m_RequestedOutputs.begin(), m_RequestedOutputs.end(), nodeDef.name()) != + m_RequestedOutputs.end()) + { + auto outId = ParseOutputId(nodeDef.name()); + const LayerBindingId layerId = boost::numeric_cast(m_NetworkOutputsBindingInfo.size()); + IOutputSlot& prevSlot = parsedTfOperationRaw->ResolveArmnnOutputSlot(outId.m_Index); + + TensorInfo tensorInfo = prevSlot.GetTensorInfo(); + + IConnectableLayer* outputLayer = m_Network->AddOutputLayer(layerId, nodeDef.name().c_str()); + + prevSlot.Connect(outputLayer->GetInputSlot(0)); + + TrackOutputBinding(outputLayer, layerId, tensorInfo); + } + } + else + { + throw ParseException(boost::str( + boost::format("Unsupported operation %1% in tensorflow::GraphDef") % operation)); + } +} + +void TfParser::LoadGraphDef(const tensorflow::GraphDef& graphDef) +{ + // add all nodes to our map + m_NodesByName.clear(); + m_NetworkInputsBindingInfo.clear(); + m_NetworkOutputsBindingInfo.clear(); + + for (int i = 0; i < graphDef.node_size(); ++i) + { + const tensorflow::NodeDef& node = graphDef.node(i); + m_NodesByName[node.name()] = &node; + } + + // Find the output nodes the user requested + std::vector targetNodes; + for (const std::string& requestedOutputName : m_RequestedOutputs) + { + auto nodeIt = m_NodesByName.find(requestedOutputName); + if (nodeIt == m_NodesByName.end()) + { + throw ParseException("Couldn't find requested output node '" + requestedOutputName + "' in graph"); + } + targetNodes.push_back(nodeIt->second); + } + + // Sort them into a linear ordering such that all inputs of a node are before the node itself + std::vector sortedNodes; + if (!armnnUtils::GraphTopologicalSort( + targetNodes, + [this](const tensorflow::NodeDef* node) + { + auto outputs = GetTfInputNodes(*node); + std::vector nodesOnly; + for (const auto & o : outputs) { + nodesOnly.push_back(o.m_IndexedValue); + } + return nodesOnly; + }, + sortedNodes)) + { + throw ParseException("Cycle detected in graph"); + } + + // Parse each node in 
order, knowing that all inputs of a node will be processed before the node itself + for (const auto& it : sortedNodes) + { + const tensorflow::NodeDef& currentNode = *it; + LoadNodeDef(currentNode, graphDef); + } +} + +INetworkPtr TfParser::CreateNetworkFromTextFile(const char* graphFile, + const std::map& inputShapes, + const std::vector& requestedOutputs) +{ + FILE* fd = fopen(graphFile, "r"); + + if (fd == nullptr) + { + std::stringstream error; + error << "Graph file " << graphFile << " failed to open"; + throw FileNotFoundException(error.str()); + } + + // Parse the file into a message + tensorflow::GraphDef graphDef; + auto input = new google::protobuf::io::FileInputStream(fileno(fd)); + bool success = google::protobuf::TextFormat::Parse(input, &graphDef); + delete input; + fclose(fd); + + if (!success) + { + std::stringstream error; + error << "Failed to parse graph file"; + throw ParseException(error.str()); + } + + return CreateNetworkFromGraphDef(graphDef, inputShapes, requestedOutputs); +} + +INetworkPtr TfParser::CreateNetworkFromString(const char* protoText, + const std::map& inputShapes, + const std::vector& requestedOutputs) +{ + // Parse the string into a message + tensorflow::GraphDef graphDef; + bool success = google::protobuf::TextFormat::ParseFromString(protoText, &graphDef); + + if (!success) + { + std::stringstream error; + error << "Failed to parse graph file"; + throw ParseException(error.str()); + } + + return CreateNetworkFromGraphDef(graphDef, inputShapes, requestedOutputs); +} + +INetworkPtr TfParser::CreateNetworkFromBinaryFile(const char* graphFile, + const std::map& inputShapes, + const std::vector& requestedOutputs) +{ + FILE* fd = fopen(graphFile, "rb"); + + if (fd == nullptr) + { + std::stringstream error; + error << "Graph file " << graphFile << " failed to open"; + throw FileNotFoundException(error.str()); + } + + // Parse the file into a message + tensorflow::GraphDef graphDef; + + google::protobuf::io::FileInputStream 
inStream(fileno(fd)); + google::protobuf::io::CodedInputStream codedStream(&inStream); + codedStream.SetTotalBytesLimit(INT_MAX, INT_MAX); + bool success = graphDef.ParseFromCodedStream(&codedStream); + fclose(fd); + + if (!success) + { + std::stringstream error; + error << "Failed to parse protobuf file" << graphFile; + throw ParseException(error.str()); + } + + return CreateNetworkFromGraphDef(graphDef, inputShapes, requestedOutputs); +} + +INetworkPtr TfParser::CreateNetworkFromGraphDef(const tensorflow::GraphDef& graphDef, + const std::map& inputShapes, + const std::vector& requestedOutputs) +{ + m_Network = INetwork::Create(); + + m_InputShapes = inputShapes; + if (requestedOutputs.size() == 0) + { + throw ParseException("requestedOutputs must have at least one entry"); + } + m_RequestedOutputs = requestedOutputs; + + try + { + LoadGraphDef(graphDef); + } + catch (const ParseException& e) + { + Cleanup(); + throw e; + } + + Cleanup(); + + return std::move(m_Network); +} + +void TfParser::Cleanup() +{ + // cleanup, in case we reuse this parser + m_InputShapes.clear(); + m_RequestedOutputs.clear(); + m_NodesByName.clear(); + m_ParsedTfOperations.clear(); +} + +BindingPointInfo TfParser::GetNetworkInputBindingInfo(const std::string& name) const +{ + return GetBindingInfo(name, "input", m_NetworkInputsBindingInfo); +} + +BindingPointInfo TfParser::GetNetworkOutputBindingInfo(const std::string& name) const +{ + return GetBindingInfo(name, "output", m_NetworkOutputsBindingInfo); +} + +std::pair TfParser::GetBindingInfo(const std::string& layerName, + const char* bindingPointDesc, + const std::unordered_map& nameToBindingInfo) +{ + auto it = nameToBindingInfo.find(layerName); + if (it == nameToBindingInfo.end()) + { + throw InvalidArgumentException(boost::str(boost::format("Unknown %1% '%2%'") % bindingPointDesc % layerName)); + } + return it->second; +} + +void TfParser::TrackInputBinding(IConnectableLayer* layer, LayerBindingId id, const TensorInfo& tensorInfo) +{ 
+ return TrackBindingPoint(layer, id, tensorInfo, "input", m_NetworkInputsBindingInfo); +} + +void TfParser::TrackOutputBinding(IConnectableLayer* layer, LayerBindingId id, const TensorInfo& tensorInfo) +{ + return TrackBindingPoint(layer, id, tensorInfo, "output", m_NetworkOutputsBindingInfo); +} + +void TfParser::TrackBindingPoint(IConnectableLayer* layer, + LayerBindingId id, + const TensorInfo& tensorInfo, + const char* bindingPointDesc, + std::unordered_map& nameToBindingInfo) +{ + const std::string layerName = layer->GetName(); + auto it = nameToBindingInfo.find(layerName); + if (it == nameToBindingInfo.end()) + { + nameToBindingInfo[layerName] = std::make_pair(id, tensorInfo); + } + else + { + throw ParseException(boost::str( + boost::format("Id %1% used by more than one %2% layer") % id % bindingPointDesc)); + } +} + +} // namespace armnnTfParser diff --git a/src/armnnTfParser/TfParser.hpp b/src/armnnTfParser/TfParser.hpp new file mode 100644 index 0000000..c5b4bce --- /dev/null +++ b/src/armnnTfParser/TfParser.hpp @@ -0,0 +1,199 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// +#pragma once + +#include "armnnTfParser/ITfParser.hpp" + +#include "armnn/Types.hpp" +#include "armnn/Tensor.hpp" +#include "armnn/INetwork.hpp" + +#include +#include +#include +#include + +namespace armnn +{ +class TensorInfo; +} + +namespace tensorflow +{ +class GraphDef; +class NodeDef; +} + +namespace armnnTfParser +{ + +using BindingPointInfo = std::pair; + +class ParsedTfOperation; +using ParsedTfOperationPtr = std::unique_ptr; + +/// +/// WithOutputTensorIndex wraps a value and an index. The purpose of +/// this template is to signify that in Tensorflow the input name of +/// a layer has the convention of 'inputTensorName:#index' where the +/// #index can be omitted and it implicitly means the 0. output of +/// the referenced layer. 
By supporting this notation we can handle +/// layers with multiple outputs, such as Split. +/// +template +struct WithOutputTensorIndex +{ + T m_IndexedValue; + unsigned int m_Index; + + WithOutputTensorIndex(const T & value, unsigned int index) + : m_IndexedValue{value} + , m_Index{index} {} + + WithOutputTensorIndex(T && value, unsigned int index) + : m_IndexedValue{value} + , m_Index{index} {} +}; + +using OutputOfParsedTfOperation = WithOutputTensorIndex; +using OutputOfConstNodeDef = WithOutputTensorIndex; +using OutputId = WithOutputTensorIndex; + +class TfParser : public ITfParser +{ +public: + /// Create the network from a protobuf text file on disk + virtual armnn::INetworkPtr CreateNetworkFromTextFile( + const char* graphFile, + const std::map& inputShapes, + const std::vector& requestedOutputs) override; + + /// Create the network from a protobuf binary file on disk + virtual armnn::INetworkPtr CreateNetworkFromBinaryFile( + const char* graphFile, + const std::map& inputShapes, + const std::vector& requestedOutputs) override; + + /// Create the network directly from protobuf text in a string. 
Useful for debugging/testing + virtual armnn::INetworkPtr CreateNetworkFromString( + const char* protoText, + const std::map& inputShapes, + const std::vector& requestedOutputs) override; + + /// Retrieve binding info (layer id and tensor info) for the network input identified by the given layer name + virtual BindingPointInfo GetNetworkInputBindingInfo(const std::string& name) const override; + + /// Retrieve binding info (layer id and tensor info) for the network output identified by the given layer name + virtual BindingPointInfo GetNetworkOutputBindingInfo(const std::string& name) const override; + +public: + TfParser(); + +private: + template + friend class ParsedConstTfOperation; + friend class ParsedMatMulTfOperation; + + /// Parses a GraphDef loaded into memory from one of the other CreateNetwork* + armnn::INetworkPtr CreateNetworkFromGraphDef(const tensorflow::GraphDef& graphDef, + const std::map& inputShapes, + const std::vector& requestedOutputs); + + /// sets up variables and then performs BFS to parse all nodes + void LoadGraphDef(const tensorflow::GraphDef& graphDef); + + /// parses a given node, assuming nodes before it in graph have been done + void LoadNodeDef(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + + /// Handling identity layers as the input for Conv2D layer + const tensorflow::NodeDef* ResolveIdentityNode(const tensorflow::NodeDef* nodeDef); + /// Finds the nodes connected as inputs of the given node in the graph. + std::vector GetTfInputNodes(const tensorflow::NodeDef& nodeDef) const; + /// Finds the IParsedTfOperations for the nodes connected as inputs of the given node in the graph, + /// and throws an exception if the number of inputs does not match the expected one. + /// This will automatically resolve any identity nodes. The result vector contains the parsed operation + /// together with the output tensor index to make the connection unambiguous. 
+ std::vector GetInputParsedTfOperationsChecked(const tensorflow::NodeDef& nodeDef, + std::size_t expectedNumInputs); + + ParsedTfOperationPtr ParseConst(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + + /// Checks if there is a pre-parsed const tensor is available with the given name and Type + template + bool HasParsedConstTensor(const std::string & nodeName) const; + + ParsedTfOperationPtr ParseAdd(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseBiasAdd(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseConv2D(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseDepthwiseConv2D(const tensorflow::NodeDef& nodeDef,const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseFusedBatchNorm(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseConcat(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseIdentity(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseLrn(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseMatMul(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseMul(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParsePlaceholder(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseRelu(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseRelu6(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseReshape(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr 
ParseResizeBilinear(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseShape(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseSqueeze(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseSigmoid(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseSoftmax(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseSoftplus(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseTanh(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseMaxPool(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParseAvgPool(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef); + ParsedTfOperationPtr ParsePooling2d(const tensorflow::NodeDef& nodeDef, const tensorflow::GraphDef& graphDef, + armnn::PoolingAlgorithm pooltype); + ParsedTfOperationPtr AddActivationLayer(const tensorflow::NodeDef& nodeDef, armnn::ActivationDescriptor& desc); + ParsedTfOperationPtr AddAdditionLayer(const tensorflow::NodeDef& nodeDef, bool isBiasAdd = false); + armnn::IConnectableLayer* AddFullyConnectedLayer(const tensorflow::NodeDef& matMulNodeDef, + const tensorflow::NodeDef* addNodeDef, const char* armnnLayerName); + + static std::pair GetBindingInfo(const std::string& layerName, + const char* bindingPointDesc, + const std::unordered_map& nameToBindingInfo); + + void TrackInputBinding(armnn::IConnectableLayer* layer, + armnn::LayerBindingId id, + const armnn::TensorInfo& tensorInfo); + + void TrackOutputBinding(armnn::IConnectableLayer* layer, + armnn::LayerBindingId id, + const armnn::TensorInfo& tensorInfo); + + static void TrackBindingPoint(armnn::IConnectableLayer* layer, armnn::LayerBindingId 
id, + const armnn::TensorInfo& tensorInfo, + const char* bindingPointDesc, + std::unordered_map& nameToBindingInfo); + + void Cleanup(); + + /// The network we're building. Gets cleared after it is passed to the user + armnn::INetworkPtr m_Network; + + using OperationParsingFunction = ParsedTfOperationPtr(TfParser::*)(const tensorflow::NodeDef& nodeDef, + const tensorflow::GraphDef& graphDef); + + /// map of TensorFlow operation names to parsing member functions + static const std::map ms_OperationNameToParsingFunctions; + + std::map m_InputShapes; + std::vector m_RequestedOutputs; + + /// map of nodes extracted from the GraphDef to speed up parsing + std::unordered_map m_NodesByName; + + std::unordered_map m_ParsedTfOperations; + + /// maps input layer names to their corresponding ids and tensor infos + std::unordered_map m_NetworkInputsBindingInfo; + + /// maps output layer names to their corresponding ids and tensor infos + std::unordered_map m_NetworkOutputsBindingInfo; +}; +} diff --git a/src/armnnTfParser/test/Activations.cpp b/src/armnnTfParser/test/Activations.cpp new file mode 100644 index 0000000..72ed64d --- /dev/null +++ b/src/armnnTfParser/test/Activations.cpp @@ -0,0 +1,113 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. 
+// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + + +struct ActivationFixture : public ParserPrototxtFixture +{ + explicit ActivationFixture(const char* activationFunction) + { + m_Prototext = "node {\n" + " name: \"Placeholder\"\n" + " op: \"Placeholder\"\n" + " attr {\n" + " key: \"dtype\"\n" + " value {\n" + " type: DT_FLOAT\n" + " }\n" + " }\n" + " attr {\n" + " key: \"shape\"\n" + " value {\n" + " shape {\n" + " unknown_rank: true\n" + " }\n" + " }\n" + " }\n" + "}\n" + "node {\n" + " name: \""; + m_Prototext.append(activationFunction); + m_Prototext.append("\"\n" + " op: \""); + m_Prototext.append(activationFunction); + m_Prototext.append("\"\n" + " input: \"Placeholder\"\n" + " attr {\n" + " key: \"T\"\n" + " value {\n" + " type: DT_FLOAT\n" + " }\n" + " }\n" + "}\n"); + + SetupSingleInputSingleOutput({ 1, 7 }, "Placeholder", activationFunction); + } +}; + + +struct ReLuFixture : ActivationFixture +{ + ReLuFixture() : ActivationFixture("Relu") {} +}; +BOOST_FIXTURE_TEST_CASE(ParseReLu, ReLuFixture) +{ + RunTest<2>({ -1.0f, -0.5f, 1.25f, -3.0f, 0.0f, 0.5f, -0.75f }, + { 0.0f, 0.0f, 1.25f, 0.0f, 0.0f, 0.5f, 0.0f }); +} + + +struct ReLu6Fixture : ActivationFixture +{ + ReLu6Fixture() : ActivationFixture("Relu6") {} +}; +BOOST_FIXTURE_TEST_CASE(ParseReLu6, ReLu6Fixture) +{ + RunTest<2>({ -1.0f, -0.5f, 7.25f, -3.0f, 0.0f, 0.5f, -0.75f }, + { 0.0f, 0.0f, 6.0f, 0.0f, 0.0f, 0.5f, 0.0f }); +} + + +struct SigmoidFixture : ActivationFixture +{ + SigmoidFixture() : ActivationFixture("Sigmoid") {} +}; +BOOST_FIXTURE_TEST_CASE(ParseSigmoid, SigmoidFixture) +{ + RunTest<2>({ -0.1f, -0.2f, -0.3f, -0.4f, 0.1f, 0.2f, 0.3f }, + { 0.4750208f, 0.45016602f, 0.42555749f, 0.40131235f, 0.52497917f, 0.54983395f, 0.57444251f }); +} + + +struct SoftplusFixture : ActivationFixture +{ + SoftplusFixture() : ActivationFixture("Softplus") {} +}; +BOOST_FIXTURE_TEST_CASE(ParseSoftplus, SoftplusFixture) 
+{ + RunTest<2>({ -0.1f, -0.2f, -0.3f, -0.4f, 0.1f, 0.2f, 0.3f }, + { 0.64439666f, 0.59813893f, 0.55435526f, 0.51301527f, 0.74439669f, 0.7981388f, 0.85435522f }); +} + + +struct TanhFixture : ActivationFixture +{ + TanhFixture() : ActivationFixture("Tanh") {} +}; +BOOST_FIXTURE_TEST_CASE(ParseTanh, TanhFixture) +{ + RunTest<2>({ -0.1f, -0.2f, -0.3f, -0.4f, 0.1f, 0.2f, 0.3f }, + { -0.09966799f, -0.19737528f, -0.29131261f, -0.379949f, 0.09966799f, 0.19737528f, 0.29131261f }); +} + + + + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/Addition.cpp b/src/armnnTfParser/test/Addition.cpp new file mode 100644 index 0000000..c9e6926 --- /dev/null +++ b/src/armnnTfParser/test/Addition.cpp @@ -0,0 +1,78 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct AdditionFixture : public ParserPrototxtFixture +{ + AdditionFixture() + { + m_Prototext = "node { \n" + " name: \"graphInput\" \n" + " op: \"Placeholder\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"shape\" \n" + " value { \n" + " shape { \n" + " } \n" + " } \n" + " } \n" + " } \n" + " node { \n" + " name: \"softmax1\" \n" + " op: \"Softmax\" \n" + " input: \"graphInput\" \n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " }\n" + " node {\n" + " name: \"softmax2\"\n" + " op : \"Softmax\"\n" + " input: \"graphInput\"\n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " }\n" + " node {\n" + " name: \"addition\"\n" + " op : \"Add\"\n" + " input: \"softmax1\"\n" + " input: \"softmax2\"\n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " }\n"; + + 
SetupSingleInputSingleOutput({ 1, 7 }, "graphInput", "addition"); + } +}; + +BOOST_FIXTURE_TEST_CASE(ParseAddition, AdditionFixture) +{ + RunTest<2>({ 0, 0, 10000, 0, 0, 0, 0 }, { 0, 0, 2, 0, 0, 0, 0 }); +} + + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/BiasAdd.cpp b/src/armnnTfParser/test/BiasAdd.cpp new file mode 100644 index 0000000..e29aeb1 --- /dev/null +++ b/src/armnnTfParser/test/BiasAdd.cpp @@ -0,0 +1,104 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct BiasAddFixture : public ParserPrototxtFixture +{ + explicit BiasAddFixture(const std::string& dataFormat) + { + m_Prototext = R"( +node { + name: "graphInput" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } +} +node { + name: "bias" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 3 + } + } + float_val: 1 + float_val: 2 + float_val: 3 + } + } + } +} +node { + name: "biasAdd" + op : "BiasAdd" + input: "graphInput" + input: "bias" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "data_format" + value { + s: ")" + dataFormat + R"(" + } + } +} +)"; + + SetupSingleInputSingleOutput({ 1, 3, 1, 3 }, "graphInput", "biasAdd"); + } +}; + +struct BiasAddFixtureNCHW : BiasAddFixture +{ + BiasAddFixtureNCHW() : BiasAddFixture("NCHW") {} +}; + +struct BiasAddFixtureNHWC : BiasAddFixture +{ + BiasAddFixtureNHWC() : BiasAddFixture("NHWC") {} +}; + +BOOST_FIXTURE_TEST_CASE(ParseBiasAddNCHW, BiasAddFixtureNCHW) +{ + RunTest<4>(std::vector(9), { 1, 1, 1, 2, 2, 2, 3, 3, 3 }); +} + +BOOST_FIXTURE_TEST_CASE(ParseBiasAddNHWC, BiasAddFixtureNHWC) +{ + 
RunTest<4>(std::vector(9), { 1, 2, 3, 1, 2, 3, 1, 2, 3 }); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/BroadcastForAdd.cpp b/src/armnnTfParser/test/BroadcastForAdd.cpp new file mode 100644 index 0000000..4c9731d --- /dev/null +++ b/src/armnnTfParser/test/BroadcastForAdd.cpp @@ -0,0 +1,149 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" +// This is a special case for add, which supports broadcasting +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct BroadcastForAddFixtureSlot1 : public ParserPrototxtFixture +{ + BroadcastForAddFixtureSlot1() + { + m_Prototext = R"( + node { + name: "graphInput" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } + } + node { + name: "Const_1" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 4.0 + float_val: 5.0 + } + } + } + } + node { + name: "Add" + op: "Add" + input: "graphInput" + input: "Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + )"; + + SetupSingleInputSingleOutput({ 1, 2, 2, 2 }, "graphInput", "Add"); + } +}; + +struct BroadcastForAddFixtureSlot0 : public ParserPrototxtFixture +{ + BroadcastForAddFixtureSlot0() + { + m_Prototext = R"( + node { + name: "graphInput" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } + } + node { + name: "Const_1" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + } + float_val: 4.0 + float_val: 5.0 + } + } + } + } + node { + name: "Add" + op: "Add" + input: "Const_1" + input: 
"graphInput" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + )"; + + SetupSingleInputSingleOutput({ 1, 2, 2, 2 }, "graphInput", "Add"); + } +}; + + +BOOST_FIXTURE_TEST_CASE(ParseBroadcastForAddition1, BroadcastForAddFixtureSlot1) +{ + RunTest<4>({ 1.0, 1.0, 2.0, 2.0, 3.0, 3.0, 4.0, 4.0 }, { 5.0, 6.0, 6.0, 7.0, 7.0, 8.0, 8.0, 9.0 }); +} + +BOOST_FIXTURE_TEST_CASE(ParseBroadcastForAddition0, BroadcastForAddFixtureSlot0) +{ + RunTest<4>({ 1.0, 1.0, 2.0, 2.0, 3.0, 3.0, 4.0, 4.0 }, { 5.0, 6.0, 6.0, 7.0, 7.0, 8.0, 8.0, 9.0 }); +} + + + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/Concat.cpp b/src/armnnTfParser/test/Concat.cpp new file mode 100644 index 0000000..a7d5ea0 --- /dev/null +++ b/src/armnnTfParser/test/Concat.cpp @@ -0,0 +1,183 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct ConcatFixture : public ParserPrototxtFixture +{ + explicit ConcatFixture(const armnn::TensorShape& inputShape0, const armnn::TensorShape& inputShape1, + unsigned int concatDim) + { + m_Prototext = R"( + node { + name: "graphInput0" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } + } + node { + name: "graphInput1" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } + } + node { + name: "concat/axis" + op: "Const" + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: )"; + + m_Prototext += std::to_string(concatDim); + + m_Prototext += R"( + } + } + } + } + node { + name: "concat" + op: "ConcatV2" + input: "graphInput0" + input: "graphInput1" + input: "concat/axis" + attr { 
+ key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_FLOAT + } + } + } + )"; + + Setup({{"graphInput0", inputShape0 }, + {"graphInput1", inputShape1 }}, {"concat"}); + } +}; + +struct ConcatFixtureNCHW : ConcatFixture +{ + ConcatFixtureNCHW() : ConcatFixture({ 1, 1, 2, 2 }, { 1, 1, 2, 2 }, 1 ) {} +}; + +struct ConcatFixtureNHWC : ConcatFixture +{ + ConcatFixtureNHWC() : ConcatFixture({ 1, 1, 2, 2 }, { 1, 1, 2, 2 }, 3 ) {} +}; + +BOOST_FIXTURE_TEST_CASE(ParseConcatNCHW, ConcatFixtureNCHW) +{ + RunTest<4>({{"graphInput0", {0.0, 1.0, 2.0, 3.0}}, + {"graphInput1", {4.0, 5.0, 6.0, 7.0}}}, + {{"concat", { 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0 }}}); +} + +BOOST_FIXTURE_TEST_CASE(ParseConcatNHWC, ConcatFixtureNHWC) +{ + RunTest<4>({{"graphInput0", {0.0, 1.0, 2.0, 3.0}}, + {"graphInput1", {4.0, 5.0, 6.0, 7.0}}}, + {{"concat", { 0.0, 1.0, 4.0, 5.0, 2.0, 3.0, 6.0, 7.0 }}}); +} + +struct ConcatFixtureDim1 : ConcatFixture +{ + ConcatFixtureDim1() : ConcatFixture({ 1, 2, 3, 4 }, { 1, 2, 3, 4 }, 1) {} +}; + +struct ConcatFixtureDim3 : ConcatFixture +{ + ConcatFixtureDim3() : ConcatFixture({ 1, 2, 3, 4 }, { 1, 2, 3, 4 }, 3) {} +}; + +BOOST_FIXTURE_TEST_CASE(ParseConcatDim1, ConcatFixtureDim1) +{ + RunTest<4>({ { "graphInput0", { 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, + 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 } }, + { "graphInput1", { 50.0, 51.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0, 58.0, 59.0, 60.0, 61.0, + 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0 } } }, + { { "concat", { 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, + 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, + 50.0, 51.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0, 58.0, 59.0, 60.0, 61.0, + 62.0, 63.0, 64.0, 65.0, 66.0, 67.0, 68.0, 69.0, 70.0, 71.0, 72.0, 73.0 } } }); +} + +BOOST_FIXTURE_TEST_CASE(ParseConcatDim3, 
ConcatFixtureDim3) +{ + RunTest<4>({ { "graphInput0", { 0.0, 1.0, 2.0, 3.0, + 4.0, 5.0, 6.0, 7.0, + 8.0, 9.0, 10.0, 11.0, + 12.0, 13.0, 14.0, 15.0, + 16.0, 17.0, 18.0, 19.0, + 20.0, 21.0, 22.0, 23.0 } }, + { "graphInput1", { 50.0, 51.0, 52.0, 53.0, + 54.0, 55.0, 56.0, 57.0, + 58.0, 59.0, 60.0, 61.0, + 62.0, 63.0, 64.0, 65.0, + 66.0, 67.0, 68.0, 69.0, + 70.0, 71.0, 72.0, 73.0 } } }, + { { "concat", { 0.0, 1.0, 2.0, 3.0, + 50.0, 51.0, 52.0, 53.0, + 4.0, 5.0, 6.0, 7.0, + 54.0, 55.0, 56.0, 57.0, + 8.0, 9.0, 10.0, 11.0, + 58.0, 59.0, 60.0, 61.0, + 12.0, 13.0, 14.0, 15.0, + 62.0, 63.0, 64.0, 65.0, + 16.0, 17.0, 18.0, 19.0, + 66.0, 67.0, 68.0, 69.0, + 20.0, 21.0, 22.0, 23.0, + 70.0, 71.0, 72.0, 73.0 } } }); +} + +BOOST_AUTO_TEST_SUITE_END() \ No newline at end of file diff --git a/src/armnnTfParser/test/ConcatOfConcats.cpp b/src/armnnTfParser/test/ConcatOfConcats.cpp new file mode 100644 index 0000000..7316b9f --- /dev/null +++ b/src/armnnTfParser/test/ConcatOfConcats.cpp @@ -0,0 +1,316 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. 
+// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct ConcatOfConcatsFixture : public ParserPrototxtFixture +{ + explicit ConcatOfConcatsFixture(const armnn::TensorShape& inputShape0, const armnn::TensorShape& inputShape1, + const armnn::TensorShape& inputShape2, const armnn::TensorShape& inputShape3, + unsigned int concatDim) + { + m_Prototext = R"( + node { + name: "graphInput0" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } + } + node { + name: "graphInput1" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } + } + node { + name: "graphInput2" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } + } + node { + name: "graphInput3" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } + } + node { + name: "Relu" + op: "Relu" + input: "graphInput0" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + node { + name: "Relu_1" + op: "Relu" + input: "graphInput1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + node { + name: "Relu_2" + op: "Relu" + input: "graphInput2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + node { + name: "Relu_3" + op: "Relu" + input: "graphInput3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + node { + name: "concat/axis" + op: "Const" + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: )"; + m_Prototext += std::to_string(concatDim); + m_Prototext += R"( + } + } + } + } + node { + name: "concat" + op: "ConcatV2" + input: "Relu" + input: "Relu_1" + input: "concat/axis" + attr 
{ + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + } + node { + name: "concat_1/axis" + op: "Const" + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: )"; + m_Prototext += std::to_string(concatDim); + m_Prototext += R"( + } + } + } + } + node { + name: "concat_1" + op: "ConcatV2" + input: "Relu_2" + input: "Relu_3" + input: "concat_1/axis" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + } + node { + name: "concat_2/axis" + op: "Const" + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + } + int_val: )"; + m_Prototext += std::to_string(concatDim); + m_Prototext += R"( + } + } + } + } + node { + name: "concat_2" + op: "ConcatV2" + input: "concat" + input: "concat_1" + input: "concat_2/axis" + attr { + key: "N" + value { + i: 2 + } + } + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "Tidx" + value { + type: DT_INT32 + } + } + } + )"; + + Setup({{ "graphInput0", inputShape0 }, + { "graphInput1", inputShape1 }, + { "graphInput2", inputShape2 }, + { "graphInput3", inputShape3}}, {"concat_2"}); + } +}; + +struct ConcatOfConcatsFixtureNCHW : ConcatOfConcatsFixture +{ + ConcatOfConcatsFixtureNCHW() : ConcatOfConcatsFixture({ 1, 1, 2, 2 }, { 1, 1, 2, 2 }, { 1, 1, 2, 2 }, + { 1, 1, 2, 2 }, 1 ) {} +}; + +struct ConcatOfConcatsFixtureNHWC : ConcatOfConcatsFixture +{ + ConcatOfConcatsFixtureNHWC() : ConcatOfConcatsFixture({ 1, 1, 2, 2 }, { 1, 1, 2, 2 }, { 1, 1, 2, 2 }, + { 1, 1, 2, 2 }, 3 ) {} +}; + +BOOST_FIXTURE_TEST_CASE(ParseConcatOfConcatsNCHW, ConcatOfConcatsFixtureNCHW) +{ + RunTest<4>({{"graphInput0", {0.0, 1.0, 2.0, 3.0}}, + {"graphInput1", {4.0, 
5.0, 6.0, 7.0}}, + {"graphInput2", {8.0, 9.0, 10.0, 11.0}}, + {"graphInput3", {12.0, 13.0, 14.0, 15.0}}}, + {{"concat_2", { 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, + 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0 }}}); +} + +BOOST_FIXTURE_TEST_CASE(ParseConcatOfConcatsNHWC, ConcatOfConcatsFixtureNHWC) +{ + RunTest<4>({{"graphInput0", {0.0, 1.0, 2.0, 3.0}}, + {"graphInput1", {4.0, 5.0, 6.0, 7.0}}, + {"graphInput2", {8.0, 9.0, 10.0, 11.0}}, + {"graphInput3", {12.0, 13.0, 14.0, 15.0}}}, + {{"concat_2", { 0.0, 1.0, 4.0, 5.0, 8.0, 9.0, 12.0, 13.0, + 2.0, 3.0, 6.0, 7.0, 10.0, 11.0, 14.0, 15.0 }}}); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/Constant.cpp b/src/armnnTfParser/test/Constant.cpp new file mode 100644 index 0000000..09587fc --- /dev/null +++ b/src/armnnTfParser/test/Constant.cpp @@ -0,0 +1,321 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include + +#include "armnnTfParser/ITfParser.hpp" + +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +// Tests that a Const node in Tensorflow can be converted to a ConstLayer in armnn (as opposed to most +// Const nodes which are used as weight inputs for convolutions etc. and are therefore not converted to +// armnn ConstLayers). 
+struct ConstantFixture : public ParserPrototxtFixture +{ + ConstantFixture() + { + // input = tf.placeholder(tf.float32, name = "input") + // const = tf.constant([17], tf.float32, [1]) + // output = tf.add(input, const, name = "output") + m_Prototext = + R"( +node { + name: "input" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + unknown_rank: true + } + } + } +} +node { + name: "Const" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 1 + } + } + float_val: 17.0 + } + } + } +} +node { + name: "output" + op: "Add" + input: "input" + input: "Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} + )"; + SetupSingleInputSingleOutput({ 1 }, "input", "output"); + } +}; + +BOOST_FIXTURE_TEST_CASE(Constant, ConstantFixture) +{ + RunTest<1>({1}, {18}); +} + + +// Tests that a single Const node in Tensorflow can be used twice by a dependant node. This should result in only +// a single armnn ConstLayer being created. 
+struct ConstantReusedFixture : public ParserPrototxtFixture +{ + ConstantReusedFixture() + { + // const = tf.constant([17], tf.float32, [1]) + // output = tf.add(const, const, name = "output") + m_Prototext = + R"( +node { + name: "Const" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 1 + } + } + float_val: 17.0 + } + } + } +} +node { + name: "output" + op: "Add" + input: "Const" + input: "Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} + )"; + Setup({}, { "output" }); + } +}; + +BOOST_FIXTURE_TEST_CASE(ConstantReused, ConstantReusedFixture) +{ + RunTest<1>({}, { { "output", { 34 } } }); +} + +template +struct ConstantValueListFixture : public ParserPrototxtFixture +{ + ConstantValueListFixture() + { + m_Prototext = + R"( +node { + name: "output" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 2 + } + dim { + size: 3 + } + })"; + + double value = 0.75; + for (int i = 0; i < ListSize; i++, value += 0.25) + { + m_Prototext += std::string("float_val : ") + std::to_string(value) + "\n"; + } + + m_Prototext += + R"( + } + } + } +} + )"; + Setup({}, { "output" }); + } +}; + +using ConstantSingleValueListFixture = ConstantValueListFixture<1>; +using ConstantMultipleValueListFixture = ConstantValueListFixture<4>; +using ConstantMaxValueListFixture = ConstantValueListFixture<6>; + +BOOST_FIXTURE_TEST_CASE(ConstantSingleValueList, ConstantSingleValueListFixture) +{ + RunTest<2>({}, { { "output", { 0.75f, 0.75f, 0.75f, 0.75f, 0.75f, 0.75f } } }); +} +BOOST_FIXTURE_TEST_CASE(ConstantMultipleValueList, ConstantMultipleValueListFixture) +{ + RunTest<2>({}, { { "output", { 0.75f, 1.f, 1.25f, 1.5f, 1.5f, 1.5f } } }); +} +BOOST_FIXTURE_TEST_CASE(ConstantMaxValueList, ConstantMaxValueListFixture) +{ + RunTest<2>({}, { { 
"output", { 0.75f, 1.f, 1.25f, 1.50f, 1.75f, 2.f } } }); +} + +template +struct ConstantCreateFixture : public ParserPrototxtFixture +{ + ConstantCreateFixture() + { + m_Prototext = + R"( +node { + name: "output" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + )"; + + if (WithShape) + { + m_Prototext += + R"( +tensor_shape { + dim { + size: 2 + } + dim { + size: 2 + } +} + )"; + } + else + { + m_Prototext += + R"( +tensor_shape { +} + )"; + } + + if (WithContent) + { + m_Prototext += + R"( +tensor_content: "\000\000\200?\000\000\200?\000\000\200?\000\000\200?\000\000\200?" + )"; + } + + if (WithValueList) + { + m_Prototext += + R"( +float_val: 1.0 +float_val: 1.0 +float_val: 1.0 +float_val: 1.0 +float_val: 1.0 + )"; + } + + m_Prototext += + R"( + } + } + } +} + )"; + } +}; + +using ConstantCreateNoValueListFixture = ConstantCreateFixture; +using ConstantCreateNoValueList2Fixture = ConstantCreateFixture; +using ConstantCreateNoContentFixture = ConstantCreateFixture; +using ConstantCreateNoContent2Fixture = ConstantCreateFixture; +using ConstantCreateNoShapeFixture = ConstantCreateFixture; +using ConstantCreateNoShape2Fixture = ConstantCreateFixture; +using ConstantCreateNoShape3Fixture = ConstantCreateFixture; + +BOOST_FIXTURE_TEST_CASE(ConstantCreateInvalidValueList, ConstantCreateNoValueListFixture) +{ + BOOST_REQUIRE_THROW(Setup({}, { "output" }), armnn::ParseException); +} +BOOST_FIXTURE_TEST_CASE(ConstantCreateInvalidValueList2, ConstantCreateNoValueList2Fixture) +{ + BOOST_REQUIRE_THROW(Setup({}, { "output" }), armnn::ParseException); +} +BOOST_FIXTURE_TEST_CASE(ConstantCreateInvalidContent, ConstantCreateNoContentFixture) +{ + BOOST_REQUIRE_THROW(Setup({}, { "output" }), armnn::ParseException); +} +BOOST_FIXTURE_TEST_CASE(ConstantCreateInvalidShape, ConstantCreateNoShapeFixture) +{ + BOOST_REQUIRE_THROW(Setup({}, { "output" }), armnn::ParseException); +} 
+BOOST_FIXTURE_TEST_CASE(ConstantCreateNoShape2, ConstantCreateNoShape2Fixture) +{ + BOOST_REQUIRE_THROW(Setup({}, { "output" }), armnn::ParseException); +} +BOOST_FIXTURE_TEST_CASE(ConstantCreateNoShape3, ConstantCreateNoShape3Fixture) +{ + Setup({}, { "output" }); + RunTest<1>({}, { { "output", { 1.f, 1.f, 1.f, 1.f, 1.f } } }); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/Convolution2d.cpp b/src/armnnTfParser/test/Convolution2d.cpp new file mode 100644 index 0000000..a7c7648 --- /dev/null +++ b/src/armnnTfParser/test/Convolution2d.cpp @@ -0,0 +1,322 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" +#include +#include + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct Convolution2dFixture : public ParserPrototxtFixture +{ + explicit Convolution2dFixture(const char* paddingType) + : Convolution2dFixture(paddingType, 1) + {} + + // dilation: 0 - dilations attribute is not included; + // dilation: >0 - dilations attribute set to [1,v,v,1], where v is the value of the dilation arg + explicit Convolution2dFixture(const char* paddingType, int stride, int dilation = 0) + { + std::string strideString = std::to_string(stride); + std::string dilationString = std::to_string(dilation); + m_Prototext = "node { \n" + " name: \"graphInput\" \n" + " op: \"Placeholder\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"shape\" \n" + " value { \n" + " shape { \n" + " } \n" + " } \n" + " } \n" + " } \n" + " node { \n" + " name: \"Const_1\" \n" + " op: \"Const\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"value\" \n" + " value { \n" + " tensor { \n" + " dtype: DT_FLOAT \n" + " tensor_shape { \n" + " dim { \n" + " size: 1 \n" + " 
} \n" + " dim { \n" + " size: 3 \n" + " } \n" + " dim { \n" + " size: 1 \n" + " } \n" + " dim { \n" + " size: 1 \n" + " } \n" + " } \n" + " tensor_content: \"\\000\\000\\000?\\000\\000\\200?\\000\\000\\000?\" \n" + " } \n" + " } \n" + " } \n" + "} \n" + "node { \n" + " name: \"potato\" \n" + " op: \"Conv2D\" \n" + " input: \"graphInput\" \n" + " input: \"Const_1\" \n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"data_format\" \n" + " value { \n" + " s: \"NHWC\" \n" + " } \n" + " } \n" + " attr { \n" + " key: \"padding\" \n" + " value { \n" + " s: \""; + m_Prototext.append(paddingType); + m_Prototext.append("\"\n" + " } \n" + " } \n" + " attr { \n" + " key: \"strides\" \n" + " value { \n" + " list { \n" + " i: 1 \n" + " i: 1 \n" + " i: "); + m_Prototext.append(strideString); + m_Prototext.append(" \n" + " i: 1 \n" + " } \n" + " } \n" + " } \n"); + + if (dilation > 0) + { + m_Prototext.append(" attr { \n" + " key: \"dilations\" \n" + " value { \n" + " list { \n" + " i: 1 \n" + " i: "); + m_Prototext.append(dilationString); + m_Prototext.append(" \n" + " i: "); + m_Prototext.append(dilationString); + m_Prototext.append(" \n" + " i: 1 \n" + " } \n" + " } \n" + " } \n"); + } + m_Prototext.append(" attr { \n" + " key: \"use_cudnn_on_gpu\" \n" + " value { \n" + " b: false \n" + " } \n" + " } \n" + "} \n"); + + // Manual height computation based on stride parameter. 
+ BOOST_ASSERT_MSG(stride == 1 || stride==2, "Add support for strides other than 1 or 2."); + unsigned int dims[] = {1,2,3,1}; + if (stride == 2) + { + dims[1]=3; + } + + SetupSingleInputSingleOutput(armnn::TensorShape(4, dims), "graphInput", "potato"); + } +}; + + +struct Convolution2dSameFixture : Convolution2dFixture +{ + Convolution2dSameFixture() : Convolution2dFixture("SAME", 1){} +}; +BOOST_FIXTURE_TEST_CASE(ParseConv2DSame, Convolution2dSameFixture) +{ + RunTest<4>({1, 2, 3, 4, 5, 6}, {2, 4, 4, 6.5f, 10 , 8.5f}); +} + +struct Convolution2dValidFixture : Convolution2dFixture +{ + Convolution2dValidFixture() : Convolution2dFixture("VALID", 1){} +}; +BOOST_FIXTURE_TEST_CASE(ParseConv2DValid, Convolution2dValidFixture) +{ + RunTest<4>({1, 2, 3, 4, 5, 6}, {4, 10}); +} + + +struct Convolution2dStride2SameFixture : Convolution2dFixture +{ + Convolution2dStride2SameFixture() : Convolution2dFixture("SAME", 2){} +}; +BOOST_FIXTURE_TEST_CASE(ParseConv2DStride2Same, Convolution2dStride2SameFixture) +{ + RunTest<4>({1, 2, 3, 4, 5, 6, 7, 8, 9}, {2, 4, 6.5, 8.5, 11, 13}); +} + + +struct Convolution2dStride2ValidFixture : Convolution2dFixture +{ + Convolution2dStride2ValidFixture() : Convolution2dFixture("VALID", 2){} +}; +BOOST_FIXTURE_TEST_CASE(ParseConv2DStride2Valid, Convolution2dStride2ValidFixture) +{ + RunTest<4>({1, 2, 3, 4, 5, 6, 7, 8, 9}, {4, 10, 16}); +} + + +struct Convolution2dDilation1Fixture : Convolution2dFixture +{ + Convolution2dDilation1Fixture() : Convolution2dFixture("SAME", 1, 1){} +}; +BOOST_FIXTURE_TEST_CASE(ParseConv2DDilation1, Convolution2dDilation1Fixture) +{ + RunTest<4>({1, 2, 3, 4, 5, 6}, {2, 4, 4, 6.5f, 10 , 8.5f}); +} + +BOOST_AUTO_TEST_CASE(ParseConv2DDilation2) +{ + const char* prototext = "" + "node {\n" + " name: \"graphInput\"\n" + " op: \"Placeholder\"\n" + " attr {\n" + " key: \"dtype\"\n" + " value {\n" + " type: DT_FLOAT\n" + " }\n" + " }\n" + " attr {\n" + " key: \"shape\"\n" + " value {\n" + " shape {\n" + " }\n" + " }\n" + " 
}\n" + "}\n" + "node {\n" + " name: \"Const_1\"\n" + " op: \"Const\"\n" + " attr {\n" + " key: \"dtype\"\n" + " value {\n" + " type: DT_FLOAT\n" + " }\n" + " }\n" + " attr {\n" + " key: \"value\"\n" + " value {\n" + " tensor {\n" + " dtype: DT_FLOAT\n" + " tensor_shape {\n" + " dim {\n" + " size: 1\n" + " }\n" + " dim {\n" + " size: 3\n" + " }\n" + " dim {\n" + " size: 1\n" + " }\n" + " dim {\n" + " size: 1\n" + " }\n" + " }\n" + " tensor_content: \"\\000\\000\\000?\\000\\000\\200?\\000\\000\\000?\"\n" + " }\n" + " }\n" + " }\n" + "}\n" + "node {\n" + " name: \"potato\"\n" + " op: \"Conv2D\"\n" + " input: \"graphInput\"\n" + " input: \"Const_1\"\n" + " attr {\n" + " key: \"T\"\n" + " value {\n" + " type: DT_FLOAT\n" + " }\n" + " }\n" + " attr {\n" + " key: \"data_format\"\n" + " value {\n" + " s: \"NHWC\"\n" + " }\n" + " }\n" + " attr {\n" + " key: \"padding\"\n" + " value {\n" + " s: \"SAME\"\n" + " }\n" + " }\n" + " attr {\n" + " key: \"strides\"\n" + " value {\n" + " list {\n" + " i: 1\n" + " i: 1\n" + " i: 1\n" + " i: 1\n" + " }\n" + " }\n" + " }\n" + " attr {\n" + " key: \"dilations\"\n" + " value {\n" + " list {\n" + " i: 1\n" + " i: 2\n" + " i: 2\n" + " i: 1\n" + " }\n" + " }\n" + " }\n" + " attr {\n" + " key: \"use_cudnn_on_gpu\"\n" + " value {\n" + " b: false\n" + " }\n" + " }\n" + "}\n"; + + std::map inputShapes; + armnn::TensorShape tensorShape = { 1, 3, 3, 1 }; + inputShapes["graphInput"] = tensorShape; + armnnTfParser::ITfParserPtr parser = armnnTfParser::ITfParser::Create(); + BOOST_CHECK_EXCEPTION(parser->CreateNetworkFromString(prototext, inputShapes, { "potato" }), + armnn::ParseException, + [] (armnn::ParseException const& ex)->bool + { + return strcmp(ex.what(), + "ArmNN only supports Convolution layers with dilations [1,1,1,1]") == 0; + }); +} + + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/DepthwiseConvolution2d.cpp b/src/armnnTfParser/test/DepthwiseConvolution2d.cpp new file mode 100644 index 0000000..84e7a7e --- /dev/null 
+++ b/src/armnnTfParser/test/DepthwiseConvolution2d.cpp @@ -0,0 +1,166 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" +#include +#include + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct DepthwiseConvolution2dFixture : public ParserPrototxtFixture +{ + explicit DepthwiseConvolution2dFixture(const char* paddingType) + { + m_Prototext = "node { \n" + " name: \"graphInput\" \n" + " op: \"Placeholder\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"value\" \n" + " value { \n" + " tensor { \n" + " dtype: DT_FLOAT \n" + " tensor_shape { \n" + " dim { \n" + " size: 1 \n" + " } \n" + " dim { \n" + " size: 1 \n" + " } \n" + " dim { \n" + " size: 3 \n" + " } \n" + " dim { \n" + " size: 3 \n" + " } \n" + " } \n" + " tensor_content: \"\\000\\000\\200?\\000\\000\\000@\\000\\000@@\\000\\000\\200@" + "\\000\\000\\240@\\000\\000\\300@\\000\\000\\340@\\000\\000\\000A\\000\\000\\020A\" \n" + " } \n" + " } \n" + " } \n" + " } \n" + " node { \n" + " name: \"Const_1\" \n" + " op: \"Const\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"value\" \n" + " value { \n" + " tensor { \n" + " dtype: DT_FLOAT \n" + " tensor_shape { \n" + " dim { \n" + " size: 1 \n" + " } \n" + " dim { \n" + " size: 3 \n" + " } \n" + " dim { \n" + " size: 3 \n" + " } \n" + " dim { \n" + " size: 3 \n" + " } \n" + " } \n" + " tensor_content: \"\\000\\000\\000?\\000\\000\\200?\\000\\000\\000?" + "\\000\\000\\000?\\000\\000\\200?\\000\\000\\000?" + "\\000\\000\\000?\\000\\000\\200?\\000\\000\\000?" + "\\000\\000\\000?\\000\\000\\200?\\000\\000\\000?" + "\\000\\000\\000?\\000\\000\\200?\\000\\000\\000?" + "\\000\\000\\000?\\000\\000\\200?\\000\\000\\000?" 
+ "\\000\\000\\000?\\000\\000\\200?\\000\\000\\000?" + "\\000\\000\\000?\\000\\000\\200?\\000\\000\\000?" + "\\000\\000\\000?\\000\\000\\200?\\000\\000\\000?\" \n" + " } \n" + " } \n" + " } \n" + "} \n" + "node { \n" + " name: \"potato\" \n" + " op: \"DepthwiseConv2dNative\" \n" + " input: \"graphInput\" \n" + " input: \"Const_1\" \n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"data_format\" \n" + " value { \n" + " s: \"NHWC\" \n" + " } \n" + " } \n" + " attr { \n" + " key: \"padding\" \n" + " value { \n" + " s: \""; + m_Prototext.append(paddingType); + m_Prototext.append("\"\n" + " } \n" + " } \n" + " attr { \n" + " key: \"strides\" \n" + " value { \n" + " list { \n" + " i: 1 \n" + " i: 1 \n" + " i: 1 \n" + " i: 1 \n" + " } \n" + " } \n" + " } \n" + " attr { \n" + " key: \"use_cudnn_on_gpu\" \n" + " value { \n" + " b: false \n" + " } \n" + " } \n" + "} \n"); + + SetupSingleInputSingleOutput({ 1, 1, 3, 3 }, "graphInput", "potato"); + } +}; + +struct DepthwiseConvolution2dSameFixture : DepthwiseConvolution2dFixture +{ + DepthwiseConvolution2dSameFixture() : DepthwiseConvolution2dFixture("SAME") { } +}; + +BOOST_FIXTURE_TEST_CASE(ParseDepthwiseConv2DSame, DepthwiseConvolution2dSameFixture) +{ + RunTest<4>({ 1, 2, 3, 4, 5, 6, 7, 8, 9 }, + { 2.5f, 5.f, 2.5f, 3.5f, 7.f, 3.5f, 4.5f, 9.f, 4.5f, + 6.f, 12.f, 6.f, 7.5f, 15.f, 7.5f, 9.f, 18.f, 9.f, + 5.5f, 11.f, 5.5f, 6.5f, 13.f, 6.5f, 7.5f, 15.f, 7.5f}); +} + +struct DepthwiseConvolution2dValidFixture : DepthwiseConvolution2dFixture +{ + DepthwiseConvolution2dValidFixture() : DepthwiseConvolution2dFixture("VALID") { } +}; + +BOOST_FIXTURE_TEST_CASE(ParseDepthwiseConv2DValid, DepthwiseConvolution2dValidFixture) +{ + RunTest<4>({ 1, 2, 3, 4, 5, 6, 7, 8, 9 }, // input data + { 6.f, 12.f, 6.f, 7.5f, 15.f, 7.5f, 9.f, 18.f, 9.f }); // output expected data +} + + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/FullyConnected.cpp 
b/src/armnnTfParser/test/FullyConnected.cpp new file mode 100644 index 0000000..2a7b495 --- /dev/null +++ b/src/armnnTfParser/test/FullyConnected.cpp @@ -0,0 +1,579 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" +#include "Runtime.hpp" +#include "Network.hpp" +#include "Graph.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +// In Tensorflow fully connected layers are expressed as a MatMul followed by an Add. +// The TfParser must detect this case and convert them to a FullyConnected layer. +struct FullyConnectedFixture : public ParserPrototxtFixture +{ + FullyConnectedFixture() + { + // input = tf.placeholder(tf.float32, [1, 1], "input") + // weights = tf.constant([2], tf.float32, [1, 1]) + // matmul = tf.matmul(input, weights) + // bias = tf.constant([1], tf.float32) + // output = tf.add(matmul, bias, name="output") + m_Prototext = R"( +node { + name: "input" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } +} +node { + name: "Const" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + float_val: 2.0 + } + } + } +} +node { + name: "MatMul" + op: "MatMul" + input: "input" + input: "Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "Const_1" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 1 + } + } + float_val: 1.0 + } + } + } +} +node { + name: 
"output" + op: "Add" + input: "MatMul" + input: "Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} + )"; + SetupSingleInputSingleOutput({ 1, 1 }, "input", "output"); + } +}; + +BOOST_FIXTURE_TEST_CASE(FullyConnected, FullyConnectedFixture) +{ + RunTest<1>({ 3 }, { 7 }); +} + +// Similar to FullyConnectedFixture, but this time the MatMul's output is used by two Adds. This should result +// in two FullyConnected layers being created. +// I +// | +// M -- C +// / \' +// C-- A A -- C +// \ / +// A +struct MatMulUsedInTwoFcFixture : public ParserPrototxtFixture +{ + MatMulUsedInTwoFcFixture() + { + m_Prototext = R"( +node { + name: "input" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } +} +node { + name: "Const" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + float_val: 2.0 + } + } + } +} +node { + name: "MatMul" + op: "MatMul" + input: "input" + input: "Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} +node { + name: "Const_1" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 1 + } + } + float_val: 5.0 + } + } + } +} +node { + name: "Const_2" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 1 + } + } + float_val: 15.0 + } + } + } +} +node { + name: "Add" + op: "Add" + input: "MatMul" + input: "Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} +node { + name: "Add_1" + op: "Add" + input: 
"MatMul" + input: "Const_2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} +node { + name: "output" + op: "Add" + input: "Add" + input: "Add_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} + )"; + SetupSingleInputSingleOutput({ 1, 1 }, "input", "output"); + } +}; + +BOOST_FIXTURE_TEST_CASE(MatMulUsedInTwoFc, MatMulUsedInTwoFcFixture) +{ + RunTest<1>({ 3 }, { 32 }); + // Ideally we would check here that the armnn network has 5 layers: + // Input, 2 x FullyConnected (biased), Add and Output. + // This would make sure the parser hasn't incorrectly added some unconnected layers corresponding to the MatMul +} + +// Similar to MatMulUsedInTwoFc, but this time the Adds are 'staggered' (see diagram), which means that only one +// FullyConnected layer can be created (the other should just be an Add). +// I +// | +// M -- C1 +// / \' +// C2 -- A | +// \ / +// A +struct MatMulUsedInTwoFcStaggeredFixture : public ParserPrototxtFixture +{ + MatMulUsedInTwoFcStaggeredFixture() + { + // input = tf.placeholder(tf.float32, shape=[1,1], name = "input") + // const1 = tf.constant([17], tf.float32, [1,1]) + // mul = tf.matmul(input, const1) + // const2 = tf.constant([7], tf.float32, [1]) + // fc = tf.add(mul, const2) + // output = tf.add(mul, fc, name="output") + m_Prototext = R"( +node { + name: "input" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } +} +node { + name: "Const" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + float_val: 17.0 + } + } + } +} +node { + name: "MatMul" + op: "MatMul" + input: "input" + input: "Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + 
b: false + } + } +} +node { + name: "Const_1" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 1 + } + } + float_val: 7.0 + } + } + } +} +node { + name: "Add" + op: "Add" + input: "MatMul" + input: "Const_1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} +node { + name: "output" + op: "Add" + input: "MatMul" + input: "Add" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} + )"; + SetupSingleInputSingleOutput({ 1, 1 }, "input", "output"); + } +}; + +BOOST_FIXTURE_TEST_CASE(MatMulUsedInTwoFcStaggered, MatMulUsedInTwoFcStaggeredFixture) +{ + RunTest<1>({ 2 }, { 75 }); + // Ideally we would check here that the armnn network has 5 layers: + // Input, FullyConnected (biased), FullyConnected (non biased), Add and Output. +} + +// A MatMul in isolation, not connected to an add. Should result in a non-biased FullyConnected layer. +struct MatMulFixture : public ParserPrototxtFixture +{ + MatMulFixture() + { + // input = tf.placeholder(tf.float32, shape = [1, 1], name = "input") + // const = tf.constant([17], tf.float32, [1, 1]) + // output = tf.matmul(input, const, name = "output") + m_Prototext = R"( +node { + name: "input" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + } + } +} +node { + name: "Const" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 1 + } + dim { + size: 1 + } + } + float_val: 17.0 + } + } + } +} +node { + name: "output" + op: "MatMul" + input: "input" + input: "Const" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "transpose_a" + value { + b: false + } + } + attr { + key: "transpose_b" + value { + b: false + } + } +} + )"; + 
SetupSingleInputSingleOutput({ 1, 1 }, "input", "output"); + } +}; + +BOOST_FIXTURE_TEST_CASE(MatMul, MatMulFixture) +{ + RunTest<1>({ 2 }, { 34 }); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/FusedBatchNorm.cpp b/src/armnnTfParser/test/FusedBatchNorm.cpp new file mode 100644 index 0000000..632d5f0 --- /dev/null +++ b/src/armnnTfParser/test/FusedBatchNorm.cpp @@ -0,0 +1,175 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct FusedBatchNormFixture : public ParserPrototxtFixture +{ + FusedBatchNormFixture() + { + m_Prototext = "node { \n" + " name: \"graphInput\" \n" + " op: \"Placeholder\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"shape\" \n" + " value { \n" + " shape { \n" + " } \n" + " } \n" + " } \n" + "} \n" + "node { \n" + " name: \"Const_1\" \n" + " op: \"Const\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"value\" \n" + " value { \n" + " tensor { \n" + " dtype: DT_FLOAT \n" + " tensor_shape { \n" + " dim { \n" + " size: 1 \n" + " } \n" + " } \n" + " float_val: 1.0 \n" + " } \n" + " } \n" + " } \n" + "} \n" + "node { \n" + " name: \"Const_2\" \n" + " op: \"Const\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"value\" \n" + " value { \n" + " tensor { \n" + " dtype: DT_FLOAT \n" + " tensor_shape { \n" + " dim { \n" + " size: 1 \n" + " } \n" + " } \n" + " float_val: 0.0 \n" + " } \n" + " } \n" + " } \n" + "} \n" + "node { \n" + " name: \"FusedBatchNormLayer/mean\" \n" + " op: \"Const\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" 
+ " } \n" + " attr { \n" + " key: \"value\" \n" + " value { \n" + " tensor { \n" + " dtype: DT_FLOAT \n" + " tensor_shape { \n" + " dim { \n" + " size: 1 \n" + " } \n" + " } \n" + " float_val: 5.0 \n" + " } \n" + " } \n" + " } \n" + "} \n" + "node { \n" + " name: \"FusedBatchNormLayer/variance\" \n" + " op: \"Const\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"value\" \n" + " value { \n" + " tensor { \n" + " dtype: DT_FLOAT \n" + " tensor_shape { \n" + " dim { \n" + " size: 1 \n" + " } \n" + " } \n" + " float_val: 2.0 \n" + " } \n" + " } \n" + " } \n" + "} \n" + "node { \n" + " name: \"output\" \n" + " op: \"FusedBatchNorm\" \n" + " input: \"graphInput\" \n" + " input: \"Const_1\" \n" + " input: \"Const_2\" \n" + " input: \"FusedBatchNormLayer/mean\" \n" + " input: \"FusedBatchNormLayer/variance\" \n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"data_format\" \n" + " value { \n" + " s: \"NHWC\" \n" + " } \n" + " } \n" + " attr { \n" + " key: \"epsilon\" \n" + " value { \n" + " f: 0.0010000000475 \n" + " } \n" + " } \n" + " attr { \n" + " key: \"is_training\" \n" + " value { \n" + " b: false \n" + " } \n" + " } \n" + "} \n"; + + SetupSingleInputSingleOutput({1, 3, 3, 1}, "graphInput", "output"); + } +}; + +BOOST_FIXTURE_TEST_CASE(ParseFusedBatchNorm, FusedBatchNormFixture) +{ + RunTest<4>({1, 2, 3, 4, 5, 6, 7, 8, 9}, // input data + {-2.8277204f, -2.12079024f, -1.4138602f, + -0.7069301f, 0.0f, 0.7069301f, + 1.4138602f, 2.12079024f, 2.8277204f}); // expected output data +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/Identity.cpp b/src/armnnTfParser/test/Identity.cpp new file mode 100644 index 0000000..ca20de5 --- /dev/null +++ b/src/armnnTfParser/test/Identity.cpp @@ -0,0 +1,161 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. 
+// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct IdentitySimpleFixture : public ParserPrototxtFixture +{ + IdentitySimpleFixture() + { + m_Prototext = "node{ " + " name: \"Placeholder\"" + " op: \"Placeholder\"" + " attr {" + " key: \"dtype\"" + " value {" + " type: DT_FLOAT" + " }" + " }" + " attr {" + " key: \"shape\"" + " value {" + " shape {" + " unknown_rank: true" + " }" + " }" + " }" + "}" + "node {" + " name: \"Identity\"" + " op: \"Identity\"" + " input: \"Placeholder\"" + " attr {" + " key: \"T\"" + " value {" + " type: DT_FLOAT" + " }" + " }" + "}"; + SetupSingleInputSingleOutput({ 4 }, "Placeholder", "Identity"); + } +}; + +BOOST_FIXTURE_TEST_CASE(IdentitySimple, IdentitySimpleFixture) +{ + RunTest<1>({ 1.0f, 2.0f, 3.0f, 4.0f }, { 1.0f, 2.0f, 3.0f, 4.0f }); +} + +struct IdentityFixture : public ParserPrototxtFixture +{ + IdentityFixture() + { + m_Prototext = "node{ " + " name: \"Placeholder\"" + " op: \"Placeholder\"" + " attr {" + " key: \"dtype\"" + " value {" + " type: DT_FLOAT" + " }" + " }" + " attr {" + " key: \"shape\"" + " value {" + " shape {" + " unknown_rank: true" + " }" + " }" + " }" + "}" + "node {" + " name: \"Identity\"" + " op: \"Identity\"" + " input: \"Placeholder\"" + " attr {" + " key: \"T\"" + " value {" + " type: DT_FLOAT" + " }" + " }" + "}" + "node {" + " name: \"Add\"" + " op: \"Add\"" + " input: \"Identity\"" + " input: \"Identity\"" + " attr {" + " key: \"T\"" + " value {" + " type: DT_FLOAT" + " }" + " }" + "}"; + SetupSingleInputSingleOutput({ 4 }, "Placeholder", "Add"); + } +}; + +BOOST_FIXTURE_TEST_CASE(ParseIdentity, IdentityFixture) +{ + RunTest<1>({ 1.0f, 2.0f, 3.0f, 4.0f }, { 2.0f, 4.0f, 6.0f, 8.0f }); +} + +struct IdentityChainFixture : public ParserPrototxtFixture +{ + IdentityChainFixture() + { + m_Prototext = "node{ " + " name: \"Placeholder\"" + " 
op: \"Placeholder\"" + " attr {" + " key: \"dtype\"" + " value {" + " type: DT_FLOAT" + " }" + " }" + " attr {" + " key: \"shape\"" + " value {" + " shape {" + " unknown_rank: true" + " }" + " }" + " }" + "}" + "node {" + " name: \"Identity\"" + " op: \"Identity\"" + " input: \"Placeholder\"" + " attr {" + " key: \"T\"" + " value {" + " type: DT_FLOAT" + " }" + " }" + "}" + "node {" + " name: \"Identity2\"" + " op: \"Identity\"" + " input: \"Identity\"" + " attr {" + " key: \"T\"" + " value {" + " type: DT_FLOAT" + " }" + " }" + "}"; + SetupSingleInputSingleOutput({ 4 }, "Placeholder", "Identity2"); + } +}; + +BOOST_FIXTURE_TEST_CASE(IdentityChain, IdentityChainFixture) +{ + RunTest<1>({ 1.0f, 2.0f, 3.0f, 4.0f }, { 1.0f, 2.0f, 3.0f, 4.0f }); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/LocalResponseNormalization.cpp b/src/armnnTfParser/test/LocalResponseNormalization.cpp new file mode 100644 index 0000000..a7c2bfe --- /dev/null +++ b/src/armnnTfParser/test/LocalResponseNormalization.cpp @@ -0,0 +1,121 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. 
+// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + + +struct LocalResponseNormalizationBaseFixture : public ParserPrototxtFixture +{ + explicit LocalResponseNormalizationBaseFixture(float alpha, float beta, float bias) + { + std::string alphaString = std::to_string(alpha); + std::string betaString = std::to_string(beta); + std::string biasString = std::to_string(bias); + + m_Prototext = "node {" + " name: \"Placeholder\"" + " op: \"Placeholder\"" + " attr {" + " key: \"dtype\"" + " value {" + " type: DT_FLOAT" + " }" + " }" + " attr {" + " key: \"shape\"" + " value {" + " shape {" + " unknown_rank: true" + " }" + " }" + " }" + "}" + "node {" + " name: \"LRN\"" + " op: \"LRN\"" + " input: \"Placeholder\"" + " attr {" + " key: \"T\"" + " value {" + " type: DT_FLOAT" + " }" + " }" + " attr {" + " key: \"alpha\"" + " value {" + " f: "; + m_Prototext.append(alphaString); + m_Prototext.append("\n" + " }" + " }" + " attr {" + " key: \"beta\"" + " value {" + " f: "); + m_Prototext.append(betaString); + m_Prototext.append("\n" + " }" + " }" + " attr {" + " key: \"bias\"" + " value {" + " f: "); + m_Prototext.append(biasString); + m_Prototext.append("\n" + " }" + " }" + " attr {" + " key: \"depth_radius\"" + " value {" + " i: 1" + " }" + " }" + "}"); + } +}; + + +struct LocalResponseNormalizationFixtureSimple : public LocalResponseNormalizationBaseFixture +{ + explicit LocalResponseNormalizationFixtureSimple() + : LocalResponseNormalizationBaseFixture(1.0f, 1.0f, 1.0f) + { + SetupSingleInputSingleOutput({ 2, 2, 2, 1 }, "Placeholder", "LRN"); + } +}; +BOOST_FIXTURE_TEST_CASE(ParseSimpleLocalResponseNormalization, LocalResponseNormalizationFixtureSimple) +{ + RunTest<4>({ 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f }, + { 0.5f, 0.4f, 0.3f, 0.23529412f, 0.1923077f, 0.16216217f, 0.14f, 0.12307692f }); +} + + +struct LocalResponseNormalizationFixture : public 
LocalResponseNormalizationBaseFixture +{ + explicit LocalResponseNormalizationFixture() + : LocalResponseNormalizationBaseFixture(0.5f, 1.0f, 0.5f) + { + SetupSingleInputSingleOutput({1, 3, 3, 2}, "Placeholder", "LRN"); + } +}; +BOOST_FIXTURE_TEST_CASE(ParseLocalResponseNormalization, LocalResponseNormalizationFixture) +{ + RunTest<4>({ 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, + 7.0f, 8.0f, 9.0f, 10.0f, 11.0f, 12.0f, + 13.0f, 14.0f, 15.0f, 16.0f, 17.0f, 18.0f}, + + {0.333333340f, 0.66666670f, 0.230769250f, 0.307692320f, 0.161290320f, 0.19354838f, + 0.122807020f, 0.14035088f, 0.098901100f, 0.109890110f, 0.082706770f, 0.09022556f, + 0.071038246f, 0.07650273f, 0.062240668f, 0.066390045f, 0.055374593f, 0.05863192f}); +} + + + + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/MultiOutput.cpp b/src/armnnTfParser/test/MultiOutput.cpp new file mode 100644 index 0000000..56be33d --- /dev/null +++ b/src/armnnTfParser/test/MultiOutput.cpp @@ -0,0 +1,144 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct MultiOutMatchFixture : public ParserPrototxtFixture +{ + MultiOutMatchFixture() + { + m_Prototext = R"( +node { + name: "input" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } +} +node { + name: "softmax1" + op: "Softmax" + input: "input:0" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} + )"; + SetupSingleInputSingleOutput({ 1, 7 }, "input", "softmax1"); + } +}; + +BOOST_FIXTURE_TEST_CASE(MultiOutMatch, MultiOutMatchFixture) +{ + // Note that the point of this test is to verify the parsing went well. + // Here we make sure the softmax has really connected to the input layer. 
+ RunTest<2>({ 0, 0, 10000, 0, 0, 0, 0 }, { 0, 0, 1, 0, 0, 0, 0 }); +} + +struct MultiOutFailFixture : public ParserPrototxtFixture +{ + MultiOutFailFixture() + { + m_Prototext = R"( +node { + name: "input" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } +} +node { + name: "softmax1" + op: "Softmax" + input: "input:1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} + )"; + BOOST_CHECK_THROW(SetupSingleInputSingleOutput({ 1, 7 }, "input", "softmax1"), armnn::ParseException); + } +}; + +BOOST_FIXTURE_TEST_CASE(MultiOutFail, MultiOutFailFixture) +{ + // Not running the graph because this is expected to throw an exception during parsing. +} + +struct MultiOutInvalidFixture : public ParserPrototxtFixture +{ + MultiOutInvalidFixture() + { + m_Prototext = R"( +node { + name: "input" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } +} +node { + name: "softmax1" + op: "Softmax" + input: "input:-1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} + )"; + BOOST_CHECK_THROW(SetupSingleInputSingleOutput({ 1, 7 }, "input", "softmax1"), armnn::ParseException); + } +}; + +BOOST_FIXTURE_TEST_CASE(MultiOutInvalid, MultiOutInvalidFixture) +{ + // Not running the graph because this is expected to throw an exception during parsing. +} + + +BOOST_AUTO_TEST_SUITE_END() \ No newline at end of file diff --git a/src/armnnTfParser/test/Multiplication.cpp b/src/armnnTfParser/test/Multiplication.cpp new file mode 100644 index 0000000..3a20fd1 --- /dev/null +++ b/src/armnnTfParser/test/Multiplication.cpp @@ -0,0 +1,172 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. 
+// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct MultiplicationFixture : public ParserPrototxtFixture +{ + MultiplicationFixture() + { + m_Prototext = "node { \n" + " name: \"graphInput\" \n" + " op: \"Placeholder\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"shape\" \n" + " value { \n" + " shape { \n" + " } \n" + " } \n" + " } \n" + " } \n" + " node { \n" + " name: \"softmax1\" \n" + " op: \"Softmax\" \n" + " input: \"graphInput\" \n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " }\n" + " node {\n" + " name: \"softmax2\"\n" + " op : \"Softmax\"\n" + " input: \"graphInput\"\n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " }\n" + " node {\n" + " name: \"multiplication\"\n" + " op : \"Mul\"\n" + " input: \"softmax1\"\n" + " input: \"softmax2\"\n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " }\n"; + + SetupSingleInputSingleOutput({ 1, 7 }, "graphInput", "multiplication"); + } +}; + +BOOST_FIXTURE_TEST_CASE(ParseMultiplication, MultiplicationFixture) +{ + RunTest<2>({ 0, 0, 10000, 0, 0, 0, 0 }, { 0, 0, 1, 0, 0, 0, 0 }); +} + +struct MultiplicationBroadcastFixture : public ParserPrototxtFixture +{ + MultiplicationBroadcastFixture(const armnn::TensorShape& inputShape0, const armnn::TensorShape& inputShape1) + { + m_Prototext = R"( +node { + name: "input0" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } +} +node { + name: "input1" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } +} +node { + name: "output" + op: "Mul" + input: "input0" + input: "input1" + attr { + key: "T" 
+ value { + type: DT_FLOAT + } + } +} + )"; + + Setup({ { "input0", inputShape0 }, + { "input1", inputShape1 } }, + { "output" }); + } +}; + +struct MultiplicationBroadcastFixture4D1D : public MultiplicationBroadcastFixture +{ + MultiplicationBroadcastFixture4D1D() : MultiplicationBroadcastFixture({ 1, 2, 2, 3 }, { 1 }) {} +}; + +BOOST_FIXTURE_TEST_CASE(ParseMultiplicationBroadcast4D1D, MultiplicationBroadcastFixture4D1D) +{ + RunTest<4>({ { "input0", { 0.0f, 1.0f, 2.0f, + 3.0f, 4.0f, 5.0f, + 6.0f, 7.0f, 8.0f, + 9.0f, 10.0f, 11.0f } }, + { "input1", { 5.0f } } }, + { { "output", { 0.0f, 5.0f, 10.0f, + 15.0f, 20.0f, 25.0f, + 30.0f, 35.0f, 40.0f, + 45.0f, 50.0f, 55.0f } } }); +} + +struct MultiplicationBroadcastFixture1D4D : public MultiplicationBroadcastFixture +{ + MultiplicationBroadcastFixture1D4D() : MultiplicationBroadcastFixture({ 1 }, { 1, 2, 2, 3 }) {} +}; + +BOOST_FIXTURE_TEST_CASE(ParseMultiplicationBroadcast1D4D, MultiplicationBroadcastFixture1D4D) +{ + RunTest<4>({ { "input0", { 3.0f } }, + { "input1", { 0.0f, 1.0f, 2.0f, + 3.0f, 4.0f, 5.0f, + 6.0f, 7.0f, 8.0f, + 9.0f, 10.0f, 11.0f } } }, + { { "output", { 0.0f, 3.0f, 6.0f, + 9.0f, 12.0f, 15.0f, + 18.0f, 21.0f, 24.0f, + 27.0f, 30.0f, 33.0f } } }); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/PassThru.cpp b/src/armnnTfParser/test/PassThru.cpp new file mode 100644 index 0000000..8462ec2 --- /dev/null +++ b/src/armnnTfParser/test/PassThru.cpp @@ -0,0 +1,52 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. 
+// +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct PassThruFixture : public ParserPrototxtFixture +{ + PassThruFixture() + { + m_Prototext = "node {\n" + " name: \"Placeholder\"\n" + " op: \"Placeholder\"\n" + " attr {\n" + " key: \"dtype\"\n" + " value {\n" + " type: DT_FLOAT\n" + " }\n" + " }\n" + " attr {\n" + " key: \"shape\"\n" + " value {\n" + " shape {\n" + " }\n" + " }\n" + " }\n" + "}\n"; + SetupSingleInputSingleOutput({ 1, 7 }, "Placeholder", "Placeholder"); + } +}; + +BOOST_FIXTURE_TEST_CASE(ValidateOutput, PassThruFixture) +{ + BOOST_TEST(m_Parser->GetNetworkOutputBindingInfo("Placeholder").second.GetNumDimensions() == 2); + BOOST_TEST(m_Parser->GetNetworkOutputBindingInfo("Placeholder").second.GetShape()[0] == 1); + BOOST_TEST(m_Parser->GetNetworkOutputBindingInfo("Placeholder").second.GetShape()[1] == 7); +} + +BOOST_FIXTURE_TEST_CASE(RunGraph, PassThruFixture) +{ + armnn::TensorInfo inputTensorInfo = m_Parser->GetNetworkInputBindingInfo("Placeholder").second; + auto input = MakeRandomTensor(inputTensorInfo, 378346); + std::vector inputVec; + inputVec.assign(input.data(), input.data() + input.num_elements()); + RunTest<2>(inputVec, inputVec); // The passthru network should output the same as the input +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/Pooling.cpp b/src/armnnTfParser/test/Pooling.cpp new file mode 100644 index 0000000..36ffa47 --- /dev/null +++ b/src/armnnTfParser/test/Pooling.cpp @@ -0,0 +1,112 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. 
+// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + + +struct Pooling2dFixture : public ParserPrototxtFixture +{ + explicit Pooling2dFixture(const char* poolingtype) + { + m_Prototext = "node {\n" + " name: \"Placeholder\"\n" + " op: \"Placeholder\"\n" + " attr {\n" + " key: \"dtype\"\n" + " value {\n" + " type: DT_FLOAT\n" + " }\n" + " }\n" + " attr {\n" + " key: \"value\"\n" + " value {\n" + " tensor {\n" + " dtype: DT_FLOAT\n" + " tensor_shape {\n" + " }\n" + " }\n" + " }\n" + " }\n" + " }\n" + "node {\n" + " name: \""; + m_Prototext.append(poolingtype); + m_Prototext.append("\"\n" + " op: \""); + m_Prototext.append(poolingtype); + m_Prototext.append("\"\n" + " input: \"Placeholder\"\n" + " attr {\n" + " key: \"T\"\n" + " value {\n" + " type: DT_FLOAT\n" + " }\n" + " }\n" + " attr {\n" + " key: \"data_format\"\n" + " value {\n" + " s: \"NHWC\"\n" + " }\n" + " }\n" + " attr {\n" + " key: \"ksize\"\n" + " value {\n" + " list {\n" + " i: 1\n" + " i: 2\n" + " i: 2\n" + " i: 1\n" + " }\n" + " }\n" + " }\n" + " attr {\n" + " key: \"padding\"\n" + " value {\n" + " s: \"VALID\"\n" + " }\n" + " }\n" + " attr {\n" + " key: \"strides\"\n" + " value {\n" + " list {\n" + " i: 1\n" + " i: 1\n" + " i: 1\n" + " i: 1\n" + " }\n" + " }\n" + " }\n" + "}\n"); + + SetupSingleInputSingleOutput({ 1, 2, 2, 1 }, "Placeholder", poolingtype); + } +}; + + +struct MaxPoolFixture : Pooling2dFixture +{ + MaxPoolFixture() : Pooling2dFixture("MaxPool") {} +}; +BOOST_FIXTURE_TEST_CASE(ParseMaxPool, MaxPoolFixture) +{ + RunTest<4>({1.0f, 2.0f, 3.0f, -4.0f}, {3.0f}); +} + + +struct AvgPoolFixture : Pooling2dFixture +{ + AvgPoolFixture() : Pooling2dFixture("AvgPool") {} +}; +BOOST_FIXTURE_TEST_CASE(ParseAvgPool, AvgPoolFixture) +{ + RunTest<4>({1.0f, 2.0f, 3.0f, 4.0f}, {2.5f}); +} + + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/Reshape.cpp b/src/armnnTfParser/test/Reshape.cpp new file mode 
100644 index 0000000..4eb6b12 --- /dev/null +++ b/src/armnnTfParser/test/Reshape.cpp @@ -0,0 +1,86 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + + +struct ReshapeFixture : public ParserPrototxtFixture +{ + ReshapeFixture() + { + m_Prototext = "node { \n" + " name: \"graphInput\" \n" + " op: \"Placeholder\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"shape\" \n" + " value { \n" + " shape { \n" + " } \n" + " } \n" + " } \n" + " } \n" + "node { \n" + " name: \"Reshape/shape\" \n" + " op: \"Const\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_INT32 \n" + " } \n" + " } \n" + " attr { \n" + " key: \"value\" \n" + " value { \n" + " tensor { \n" + " dtype: DT_INT32 \n" + " tensor_shape { \n" + " dim { \n" + " size: 2 \n" + " } \n" + " } \n" + " tensor_content: \"\\002\\000\\000\\000\\002\\000\\000\\000\" \n" + " } \n" + " } \n" + " } \n" + "} \n" + "node { \n" + " name: \"Reshape\" \n" + " op: \"Reshape\" \n" + " input: \"graphInput\" \n" + " input: \"Reshape/shape\" \n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"Tshape\" \n" + " value { \n" + " type: DT_INT32 \n" + " } \n" + " } \n" + "} \n"; + + SetupSingleInputSingleOutput({1, 4}, "graphInput", "Reshape"); + } +}; + +BOOST_FIXTURE_TEST_CASE(ParseReshape, ReshapeFixture) +{ + RunTest<2>({ 0.0f, 1.0f, 2.0f, 3.0f }, { 0.0f, 1.0f, 2.0f, 3.0f }); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/ResizeBilinear.cpp b/src/armnnTfParser/test/ResizeBilinear.cpp new file mode 100644 index 0000000..30d898f --- /dev/null +++ b/src/armnnTfParser/test/ResizeBilinear.cpp @@ -0,0 +1,114 @@ +// +// Copyright © 2017 
Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct ResizeBilinearFixture : public ParserPrototxtFixture +{ + ResizeBilinearFixture() + { + m_Prototext = R"( +node { + name: "graphInput" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 1 + } + dim { + size: 3 + } + dim { + size: 3 + } + dim { + size: 1 + } + } + tensor_content: +"\000\000\000\000\000\000\200?\000\000\000@\000\000@@\000\000\200@\000\000\240@\000\000\300@\000\000\340@\000\000\000A" + } + } + } +} +node { + name: "resizeBilinearLayer/size" + op: "Const" + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_INT32 + tensor_shape { + dim { + size: 2 + } + } + tensor_content: "\005\000\000\000\005\000\000\000" + } + } + } +} +node { + name: "resizeBilinearLayer" + op: "ResizeBilinear" + input: "graphInput" + input: "resizeBilinearLayer/size" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + attr { + key: "align_corners" + value { + b: false + } + } +} + )"; + + SetupSingleInputSingleOutput({ 1, 3, 3, 1 }, "graphInput", "resizeBilinearLayer"); + } +}; + +BOOST_FIXTURE_TEST_CASE(ParseResizeBilinear, ResizeBilinearFixture) +{ + RunTest<4>(// input data + { 0.0f, 1.0f, 2.0f, + 3.0f, 4.0f, 5.0f, + 6.0f, 7.0f, 8.0f }, + // expected output data + { 0.0f, 0.6f, 1.2f, 1.8f, 2.0f, + 1.8f, 2.4f, 3.0f, 3.6f, 3.8f, + 3.6f, 4.2f, 4.8f, 5.4f, 5.6f, + 5.4f, 6.0f, 6.6f, 7.2f, 7.4f, + 6.0f, 6.6f, 7.2f, 7.8f, 8.0f }); + +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/Shape.cpp b/src/armnnTfParser/test/Shape.cpp new file mode 100644 index 0000000..7b414ec --- /dev/null +++ b/src/armnnTfParser/test/Shape.cpp @@ -0,0 
+1,94 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct ShapeFixture : public ParserPrototxtFixture +{ + ShapeFixture() + { + m_Prototext = + "node { \n" + " name: \"Placeholder\" \n" + " op: \"Placeholder\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"shape\" \n" + " value { \n" + " shape { \n" + " dim { \n" + " size: 1 \n" + " } \n" + " dim { \n" + " size: 1 \n" + " } \n" + " dim { \n" + " size: 1 \n" + " } \n" + " dim { \n" + " size: 4 \n" + " } \n" + " } \n" + " } \n" + " } \n" + "} \n" + "node { \n" + " name: \"shapeTest\" \n" + " op: \"Shape\" \n" + " input: \"Placeholder\" \n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"out_type\" \n" + " value { \n" + " type: DT_INT32 \n" + " } \n" + " } \n" + "} \n" + "node { \n" + " name: \"Reshape\" \n" + " op: \"Reshape\" \n" + " input: \"Placeholder\" \n" + " input: \"shapeTest\" \n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"Tshape\" \n" + " value { \n" + " type: DT_INT32 \n" + " } \n" + " } \n" + "} \n"; + + SetupSingleInputSingleOutput({1, 4}, "Placeholder", "Reshape"); + } +}; + +BOOST_FIXTURE_TEST_CASE(ParseShape, ShapeFixture) +{ + // Note: the test's output cannot be an int32 const layer, because that cannot exist in the + // as ARMNN only supports u8 and float layers. For that reason I added a reshape layer + // which reshapes the input to its original dimensions, which is not changing it. 
+ RunTest<2>({ 0.0f, 1.0f, 2.0f, 3.0f }, { 0.0f, 1.0f, 2.0f, 3.0f }); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/Softmax.cpp b/src/armnnTfParser/test/Softmax.cpp new file mode 100644 index 0000000..1ab28ea --- /dev/null +++ b/src/armnnTfParser/test/Softmax.cpp @@ -0,0 +1,55 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct SoftmaxFixture : public ParserPrototxtFixture +{ + SoftmaxFixture() + { + m_Prototext = "node {\n" + " name: \"blah\"\n" + " op: \"Placeholder\"\n" + " attr {\n" + " key: \"dtype\"\n" + " value {\n" + " type: DT_FLOAT\n" + " }\n" + " }\n" + " attr {\n" + " key: \"shape\"\n" + " value {\n" + " shape {\n" + " }\n" + " }\n" + " }\n" + "}\n" + "node {\n" + " name: \"blah2\"\n" + " op: \"Softmax\"\n" + " input: \"blah\"\n" + " attr {\n" + " key: \"T\"\n" + " value {\n" + " type: DT_FLOAT\n" + " }\n" + " }\n" + "}\n"; + + SetupSingleInputSingleOutput({ 1, 7 }, "blah", "blah2"); + } +}; + +BOOST_FIXTURE_TEST_CASE(ParseSoftmax, SoftmaxFixture) +{ + RunTest<2>({ 0, 0, 10000, 0, 0, 0, 0 }, { 0, 0, 1, 0, 0, 0, 0 }); +} + + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/Squeeze.cpp b/src/armnnTfParser/test/Squeeze.cpp new file mode 100644 index 0000000..d2d7d49 --- /dev/null +++ b/src/armnnTfParser/test/Squeeze.cpp @@ -0,0 +1,108 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. 
+// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + + +template +struct SqueezeFixture : public ParserPrototxtFixture +{ + SqueezeFixture() + { + m_Prototext = + "node { \n" + " name: \"graphInput\" \n" + " op: \"Placeholder\" \n" + " attr { \n" + " key: \"dtype\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"shape\" \n" + " value { \n" + " shape { \n" + " } \n" + " } \n" + " } \n" + " } \n" + "node { \n" + " name: \"Squeeze\" \n" + " op: \"Squeeze\" \n" + " input: \"graphInput\" \n" + " attr { \n" + " key: \"T\" \n" + " value { \n" + " type: DT_FLOAT \n" + " } \n" + " } \n" + " attr { \n" + " key: \"squeeze_dims\" \n" + " value { \n" + " list {\n"; + + if (withDimZero) + { + m_Prototext += "i:0\n"; + } + + if (withDimOne) + { + m_Prototext += "i:1\n"; + } + + m_Prototext += + " } \n" + " } \n" + " } \n" + "} \n"; + + SetupSingleInputSingleOutput({ 1, 1, 2, 2 }, "graphInput", "Squeeze"); + } +}; + +typedef SqueezeFixture ImpliedDimensionsSqueezeFixture; +typedef SqueezeFixture ExplicitDimensionZeroSqueezeFixture; +typedef SqueezeFixture ExplicitDimensionOneSqueezeFixture; +typedef SqueezeFixture ExplicitDimensionsSqueezeFixture; + +BOOST_FIXTURE_TEST_CASE(ParseImplicitSqueeze, ImpliedDimensionsSqueezeFixture) +{ + BOOST_TEST((m_Parser->GetNetworkOutputBindingInfo("Squeeze").second.GetShape() == + armnn::TensorShape({2,2}))); + RunTest<2>({ 1.0f, 2.0f, 3.0f, 4.0f }, + { 1.0f, 2.0f, 3.0f, 4.0f }); +} + +BOOST_FIXTURE_TEST_CASE(ParseDimensionZeroSqueeze, ExplicitDimensionZeroSqueezeFixture) +{ + BOOST_TEST((m_Parser->GetNetworkOutputBindingInfo("Squeeze").second.GetShape() == + armnn::TensorShape({1,2,2}))); + RunTest<3>({ 1.0f, 2.0f, 3.0f, 4.0f }, + { 1.0f, 2.0f, 3.0f, 4.0f }); +} + +BOOST_FIXTURE_TEST_CASE(ParseDimensionOneSqueeze, ExplicitDimensionOneSqueezeFixture) +{ + 
BOOST_TEST((m_Parser->GetNetworkOutputBindingInfo("Squeeze").second.GetShape() == + armnn::TensorShape({1,2,2}))); + RunTest<3>({ 1.0f, 2.0f, 3.0f, 4.0f }, + { 1.0f, 2.0f, 3.0f, 4.0f }); +} + +BOOST_FIXTURE_TEST_CASE(ParseExplicitDimensionsSqueeze, ExplicitDimensionsSqueezeFixture) +{ + BOOST_TEST((m_Parser->GetNetworkOutputBindingInfo("Squeeze").second.GetShape() == + armnn::TensorShape({2,2}))); + RunTest<2>({ 1.0f, 2.0f, 3.0f, 4.0f }, + { 1.0f, 2.0f, 3.0f, 4.0f }); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/TestDependencies.cpp b/src/armnnTfParser/test/TestDependencies.cpp new file mode 100644 index 0000000..13ab17c --- /dev/null +++ b/src/armnnTfParser/test/TestDependencies.cpp @@ -0,0 +1,296 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +// Graph which tests that nodes are re-ordered in the queue when they are encountered a second time. +// In this case R0 will be encountered first via R1 and then via R2. At that time +// we need to make sure that R0 (and the I on which it is dependent) is moved to the front again +// so that it is before both R1 and R2. 
+// I +// | +// R0 +// / \' +// R1 R2 +// \ | +// \ R3 +// \| +// O +struct RediscoveredDependenciesFixture : public ParserPrototxtFixture +{ + RediscoveredDependenciesFixture() + { + // input = tf.placeholder(tf.float32, 1, "input") + // relu0 = tf.nn.relu(input, "relu0") + // relu1 = tf.nn.relu(relu0, "relu1") + // relu2 = tf.nn.relu(relu0, "relu2") + // relu3 = tf.nn.relu(relu2, "relu3") + // output = tf.add(relu1, relu3, "output") + m_Prototext = R"( + node { + name: "input" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 1 + } + } + } + } + } + node { + name: "relu0" + op: "Relu" + input: "input" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + node { + name: "relu1" + op: "Relu" + input: "relu0" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + node { + name: "relu2" + op: "Relu" + input: "relu0" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + node { + name: "relu3" + op: "Relu" + input: "relu2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + node { + name: "output" + op: "Add" + input: "relu1" + input: "relu3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + )"; + SetupSingleInputSingleOutput({ 1 }, "input", "output"); + } +}; + +BOOST_FIXTURE_TEST_CASE(RediscoveredDependencies, RediscoveredDependenciesFixture) +{ + RunTest<1>({1}, {2}); +} + +// Tests that a simple cycle in the tensorflow graph will be detected and an exception thrown, rather than the TfParser +// getting stuck in an infinite loop. 
+BOOST_AUTO_TEST_CASE(SimpleCycle) +{ + const char* prototext = R"( +node { + name: "r1" + op: "Relu" + input: "r2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} +node { + name: "r2" + op: "Relu" + input: "r1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} + )"; + armnnTfParser::ITfParserPtr parser = armnnTfParser::ITfParser::Create(); + BOOST_CHECK_THROW(parser->CreateNetworkFromString(prototext, {}, { "r2" }), armnn::ParseException); +} + +// Similar to the above SimpleCycle test, but has a single node which connects to itself. +BOOST_AUTO_TEST_CASE(SingleNodeCycle) +{ + const char* prototext = R"( +node { + name: "r1" + op: "Relu" + input: "r1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} + )"; + armnnTfParser::ITfParserPtr parser = armnnTfParser::ITfParser::Create(); + BOOST_CHECK_THROW(parser->CreateNetworkFromString(prototext, {}, { "r1" }), armnn::ParseException); +} + +// Similar to the above SimpleCycle test, but with a more complicated graph. 
+// I +// | +// A2---<---<- +// / \' | +// R1 R2 | +// \ | | +// \ R3 | +// \| | +// A1-->--->| +// +BOOST_AUTO_TEST_CASE(ComplexCycle) +{ + // input = tf.placeholder(tf.float32, 1, "input") + // add2 = tf.nn.relu(input, add1, "add2") // This line won't actually run in TF, because add1 is not yet defined + // relu1 = tf.nn.relu(relu0, "relu1") + // relu2 = tf.nn.relu(relu0, "relu2") + // relu3 = tf.nn.relu(relu2, "relu3") + // add1 = tf.add(relu1, relu3, "add1") + const char* prototext = R"( + node { + name: "input" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 1 + } + } + } + } + } + node { + name: "add2" + op: "Add" + input: "input" + input: "add1" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + node { + name: "relu1" + op: "Relu" + input: "add2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + node { + name: "relu2" + op: "Relu" + input: "add2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + node { + name: "relu3" + op: "Relu" + input: "relu2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + node { + name: "add1" + op: "Add" + input: "relu1" + input: "relu3" + attr { + key: "T" + value { + type: DT_FLOAT + } + } + } + )"; + armnnTfParser::ITfParserPtr parser = armnnTfParser::ITfParser::Create(); + BOOST_CHECK_THROW(parser->CreateNetworkFromString(prototext, {}, { "add1" }), armnn::ParseException); +} + +// Tests that a graph with an input that is not present throws a ParseException. 
+BOOST_AUTO_TEST_CASE(InvalidInput) +{ + const char* prototext = R"( +node { + name: "r1" + op: "Relu" + input: "a-node-that-does-not-exist" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} + )"; + armnnTfParser::ITfParserPtr parser = armnnTfParser::ITfParser::Create(); + BOOST_CHECK_THROW(parser->CreateNetworkFromString(prototext, {}, { "r1" }), armnn::ParseException); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnTfParser/test/TestMultiInputsOutputs.cpp b/src/armnnTfParser/test/TestMultiInputsOutputs.cpp new file mode 100644 index 0000000..5eea616 --- /dev/null +++ b/src/armnnTfParser/test/TestMultiInputsOutputs.cpp @@ -0,0 +1,92 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include +#include "armnnTfParser/ITfParser.hpp" +#include "ParserPrototxtFixture.hpp" + +BOOST_AUTO_TEST_SUITE(TensorflowParser) + +struct MultiInputsOutputsFixture : public ParserPrototxtFixture +{ + MultiInputsOutputsFixture() + { + // input1 = tf.placeholder(tf.float32, shape=[], name = "input1") + // input2 = tf.placeholder(tf.float32, shape = [], name = "input2") + // add1 = tf.add(input1, input2, name = "add1") + // add2 = tf.add(input1, input2, name = "add2") + m_Prototext = R"( +node { + name: "input1" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } +} +node { + name: "input2" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "shape" + value { + shape { + } + } + } +} +node { + name: "add1" + op: "Add" + input: "input1" + input: "input2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} +node { + name: "add2" + op: "Add" + input: "input1" + input: "input2" + attr { + key: "T" + value { + type: DT_FLOAT + } + } +} + )"; + Setup({ { "input1", { 1 } }, + { "input2", { 1 } } }, + { "add1", "add2" }); + } +}; + 
+BOOST_FIXTURE_TEST_CASE(MultiInputsOutputs, MultiInputsOutputsFixture) +{ + RunTest<1>({ { "input1", {12.0f} }, { "input2", { 13.0f } } }, + { { "add1", { 25.0f } }, { "add2", { 25.0f } } }); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/src/armnnUtils/DotSerializer.cpp b/src/armnnUtils/DotSerializer.cpp new file mode 100644 index 0000000..1feea54 --- /dev/null +++ b/src/armnnUtils/DotSerializer.cpp @@ -0,0 +1,219 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include "DotSerializer.hpp" + +#include +#include +#include + +namespace armnn +{ + +namespace +{ +std::string Indent(int numSpaces) +{ + std::stringstream ss; + for (int i = 0; i < numSpaces; i++) + { + ss << " "; + } + return ss.str(); +} +} //namespace + + +HtmlFont::HtmlFont(std::ostream& stream, int fontSize, const char *color, const char *face) + : DotBase(stream) +{ + GetStream() << " -1) + { + GetStream() << " POINT-SIZE=" << "\"" << fontSize << "\""; + } + + if (color && std::strlen(color) != 0) + { + GetStream() << " COLOR=\"" << color << "\" "; + } + + if (face && std::strlen(face) != 0) + { + GetStream() << " FACE=\"" << face << "\" "; + } + + GetStream() << ">"; +} + + +HtmlFont::HtmlFont(std::ostream& stream) + : HtmlFont(stream, -1, nullptr, nullptr) +{} + +HtmlFont::~HtmlFont() +{ + GetStream() << ""; +} + + +DotAttributeSet::DotAttributeSet(std::ostream& stream) + : DotBase(stream) +{ + GetStream() << "["; +} + +DotAttributeSet::~DotAttributeSet() +{ + bool doSpace=false; + for (auto attrib : m_Attributes) + { + if (doSpace) + { + GetStream() << " "; + } + + GetStream() << attrib; + doSpace=true; + } + + GetStream() << "]"; +} + +DotAttributeSet & DotAttributeSet::AddAttribute(const std::string& name, const std::stringstream& value) +{ + std::stringstream ss; + ss << name <<"=" << value.str(); + m_Attributes.push_back(ss.str()); + return *this; +} + +DotAttributeSet & 
DotAttributeSet::AddAttribute(const std::string& name, int value) +{ + std::stringstream ss; + ss << name <<"=" << value; + m_Attributes.push_back(ss.str()); + return *this; +} + +DotAttributeSet & DotAttributeSet::AddAttribute(const std::string& name, const std::string& value) +{ + std::stringstream ss; + ss << name <<"=\"" << value << "\""; + m_Attributes.push_back(ss.str()); + return *this; +} + +DotEdge::DotEdge(std::ostream& stream, unsigned int fromNodeId, unsigned int toNodeId) + : DotBase(stream) +{ + std::stringstream ss; + ss << Indent(4) << fromNodeId << " -> " << toNodeId << " "; + GetStream() << ss.str(); + + m_Attributes = std::make_unique(stream); +} + +DotEdge::~DotEdge() +{ + m_Attributes.reset(nullptr); + GetStream() << ";" << std::endl; +} + + +NodeContent::NodeContent(std::ostream& stream) + : DotBase(stream) +{ +} + +NodeContent & NodeContent::SetName(const std::string & name) +{ + m_Name = name; + return *this; +} + +NodeContent & NodeContent::AddContent(const std::string & content) +{ + m_Contents.push_back(content); + return *this; +} + +NodeContent::~NodeContent() +{ + std::stringstream ss; + ss << "label=\"{" << m_Name; + if (!m_Contents.empty()) + { + ss << "|"; + } + for (auto & content : m_Contents) + { + ss << content; + ss << "\\l"; + } + ss << "}\""; + GetStream() << ss.str(); +} + +DotNode::DotNode(std::ostream& stream, unsigned int nodeId, const char* label) + : DotBase(stream) +{ + std::stringstream ss; + ss << Indent(4) << nodeId; + + GetStream() << ss.str() << " "; + + m_Contents = std::make_unique(stream); + m_Attributes = std::make_unique(stream); + + if (std::strlen(label) != 0) + { + m_Contents->SetName(label); + } + else + { + m_Contents->SetName(""); + } +} + +DotNode::~DotNode() +{ + m_Contents.reset(nullptr); + m_Attributes.reset(nullptr); + GetStream() << ";" << std::endl; +} + + +DotDefaults::DotDefaults(std::ostream& stream, const char* type) + : DotBase(stream) +{ + std::stringstream ss; + ss << Indent(4) << type; + 
+ GetStream() << ss.str() << " "; + m_Attributes = std::make_unique(stream); +} + +DotDefaults::~DotDefaults() +{ + m_Attributes.reset(nullptr); + GetStream() << ";" << std::endl; +} + +DotGraph::DotGraph(std::ostream& stream, const char* name) + : DotBase(stream) +{ + GetStream() << "digraph " << name << " {" << std::endl; +} + +DotGraph::~DotGraph() +{ + GetStream() << "}" << std::endl; +} + +} //namespace armnn + + diff --git a/src/armnnUtils/DotSerializer.hpp b/src/armnnUtils/DotSerializer.hpp new file mode 100644 index 0000000..3cb591c --- /dev/null +++ b/src/armnnUtils/DotSerializer.hpp @@ -0,0 +1,131 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#pragma once + +#include +#include +#include + +namespace armnn +{ + +class DotBase +{ +public: + explicit DotBase(std::ostream& stream) + : m_Stream(stream) {} + + std::ostream& GetStream() { return m_Stream; } + +private: + std::ostream& m_Stream; +}; + +class HtmlSection : public DotBase +{ +public: + explicit HtmlSection(std::ostream& stream) + : DotBase(stream) { GetStream() << "<";} + ~HtmlSection() { GetStream() << ">"; } +}; + +class HtmlSimpleTag : public DotBase +{ +public: + explicit HtmlSimpleTag(std::ostream& stream, const char* name) + : DotBase(stream) + , m_Name(name){ GetStream() << "<" << m_Name << ">"; } + ~HtmlSimpleTag() { GetStream() << ""; } + +private: + const char* m_Name; +}; + +class HtmlBold : public HtmlSimpleTag +{ +public: + explicit HtmlBold(std::ostream &stream) + : HtmlSimpleTag(stream, "B") {} +}; + +class HtmlFont : public DotBase +{ +public: + explicit HtmlFont(std::ostream& stream, int fontSize, const char* color, const char* face); + explicit HtmlFont(std::ostream& stream); + ~HtmlFont(); +}; + +class DotAttributeSet : public DotBase +{ +public: + explicit DotAttributeSet(std::ostream& stream); + ~DotAttributeSet(); + + DotAttributeSet & AddAttribute(const std::string& name, const 
std::stringstream& value); + DotAttributeSet & AddAttribute(const std::string& name, int value); + DotAttributeSet & AddAttribute(const std::string& name, const std::string& value); +private: + std::vector m_Attributes; +}; + +class DotEdge : public DotBase +{ +public: + explicit DotEdge(std::ostream& stream, unsigned int fromNodeId, unsigned int toNodeId); + ~DotEdge(); + + DotAttributeSet& GetAttributeSet() { return *m_Attributes.get(); } +private: + std::unique_ptr m_Attributes; +}; + +class NodeContent : public DotBase +{ +public: + explicit NodeContent(std::ostream& stream); + NodeContent & SetName(const std::string & name); + NodeContent & AddContent(const std::string & content); + + ~NodeContent(); +private: + std::string m_Name; + std::vector m_Contents; +}; + +class DotNode : public DotBase +{ +public: + explicit DotNode(std::ostream& stream, unsigned int nodeId, const char* label); + ~DotNode(); + + NodeContent& GetContents() { return *m_Contents.get(); } + DotAttributeSet& GetAttributeSet() { return *m_Attributes.get(); } +private: + std::unique_ptr m_Contents; + std::unique_ptr m_Attributes; +}; + +class DotDefaults : public DotBase +{ +public: + explicit DotDefaults(std::ostream& stream, const char* type); + ~DotDefaults(); + + DotAttributeSet& GetAttributeSet() { return *m_Attributes.get(); } +private: + std::unique_ptr m_Attributes; +}; + +class DotGraph : public DotBase +{ +public: + explicit DotGraph(std::ostream& stream, const char* name); + ~DotGraph(); +private: +}; + +} //namespace armnn diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 15b1b24..6bc8817 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -85,7 +85,51 @@ if(BUILD_CAFFE_PARSER) CaffeParserTest(CaffeYolo-Armnn "${CaffeYolo-Armnn_sources}") endif() -if (BUILD_CAFFE_PARSER) +if(BUILD_TF_PARSER) + macro(TfParserTest testName sources) + add_executable_ex(${testName} ${sources}) + target_include_directories(${testName} PRIVATE ../src/armnnUtils) + + 
target_link_libraries(${testName} inferenceTest) + target_link_libraries(${testName} armnnTfParser) + target_link_libraries(${testName} armnn) + target_link_libraries(${testName} ${CMAKE_THREAD_LIBS_INIT}) + if(OPENCL_LIBRARIES) + target_link_libraries(${testName} ${OPENCL_LIBRARIES}) + endif() + target_link_libraries(${testName} + ${Boost_SYSTEM_LIBRARY} + ${Boost_FILESYSTEM_LIBRARY} + ${Boost_PROGRAM_OPTIONS_LIBRARY}) + addDllCopyCommands(${testName}) + endmacro() + + set(TfMnist-Armnn_sources + TfMnist-Armnn/TfMnist-Armnn.cpp + MnistDatabase.hpp + MnistDatabase.cpp) + TfParserTest(TfMnist-Armnn "${TfMnist-Armnn_sources}") + + set(TfCifar10-Armnn_sources + TfCifar10-Armnn/TfCifar10-Armnn.cpp + Cifar10Database.hpp + Cifar10Database.cpp) + TfParserTest(TfCifar10-Armnn "${TfCifar10-Armnn_sources}") + + set(TfMobileNet-Armnn_sources + TfMobileNet-Armnn/TfMobileNet-Armnn.cpp + MobileNetDatabase.hpp + MobileNetDatabase.cpp) + TfParserTest(TfMobileNet-Armnn "${TfMobileNet-Armnn_sources}") + + set(TfInceptionV3-Armnn_sources + TfInceptionV3-Armnn/TfInceptionV3-Armnn.cpp + MobileNetDatabase.hpp + MobileNetDatabase.cpp) + TfParserTest(TfInceptionV3-Armnn "${TfInceptionV3-Armnn_sources}") +endif() + +if (BUILD_CAFFE_PARSER OR BUILD_TF_PARSER) set(ExecuteNetwork_sources ExecuteNetwork/ExecuteNetwork.cpp) @@ -95,6 +139,9 @@ if (BUILD_CAFFE_PARSER) if (BUILD_CAFFE_PARSER) target_link_libraries(ExecuteNetwork armnnCaffeParser) endif() + if (BUILD_TF_PARSER) + target_link_libraries(ExecuteNetwork armnnTfParser) + endif() target_link_libraries(ExecuteNetwork armnn) target_link_libraries(ExecuteNetwork ${CMAKE_THREAD_LIBS_INIT}) if(OPENCL_LIBRARIES) diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp index 5e9e653..04ab195 100644 --- a/tests/ExecuteNetwork/ExecuteNetwork.cpp +++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp @@ -6,6 +6,9 @@ #if defined(ARMNN_CAFFE_PARSER) #include "armnnCaffeParser/ICaffeParser.hpp" #endif +#if 
defined(ARMNN_TF_PARSER) +#include "armnnTfParser/ITfParser.hpp" +#endif #include "Logging.hpp" #include "../InferenceTest.hpp" @@ -232,8 +235,13 @@ int main(int argc, char* argv[]) } else if (modelFormat.find("tensorflow") != std::string::npos) { +#if defined(ARMNN_TF_PARSER) + return MainImpl(modelPath.c_str(), isModelBinary, computeDevice, + inputName.c_str(), inputTensorShape.get(), inputTensorDataFilePath.c_str(), outputName.c_str()); +#else BOOST_LOG_TRIVIAL(fatal) << "Not built with Tensorflow parser support."; return 1; +#endif } else { diff --git a/tests/ImageNetDatabase.cpp b/tests/ImageNetDatabase.cpp index 0a235c9..ac4bc21 100644 --- a/tests/ImageNetDatabase.cpp +++ b/tests/ImageNetDatabase.cpp @@ -33,13 +33,6 @@ std::unique_ptr ImageNetDatabase::GetTestCaseDa testCaseId = testCaseId % boost::numeric_cast(m_ImageSet.size()); const ImageSet& imageSet = m_ImageSet[testCaseId]; const std::string fullPath = m_BinaryDirectory + imageSet.first; - FILE* file = fopen(fullPath.c_str(), "rb"); - - if (file == nullptr) - { - BOOST_LOG_TRIVIAL(fatal) << "Failed to load " << fullPath; - return nullptr; - } InferenceTestImage image(fullPath.c_str()); image.Resize(m_Width, m_Height); diff --git a/tests/InferenceTest.inl b/tests/InferenceTest.inl index 64f97c1..83a9945 100644 --- a/tests/InferenceTest.inl +++ b/tests/InferenceTest.inl @@ -55,6 +55,26 @@ TestCaseResult ClassifierTestCase::ProcessResult(cons auto& output = this->GetOutput(); const auto testCaseId = this->GetTestCaseId(); + std::map resultMap; + { + int index = 0; + for (const auto & o : output) + { + resultMap[o] = index++; + } + } + + { + BOOST_LOG_TRIVIAL(info) << "= Prediction values for test #" << testCaseId; + auto it = resultMap.rbegin(); + for (int i=0; i<5 && it != resultMap.rend(); ++i) + { + BOOST_LOG_TRIVIAL(info) << "Top(" << (i+1) << ") prediction is " << it->second << + " with confidence: " << 100.0*(it->first) << "%"; + ++it; + } + } + const unsigned int prediction = boost::numeric_cast( 
std::distance(output.begin(), std::max_element(output.begin(), output.end()))); diff --git a/tests/InferenceTestImage.cpp b/tests/InferenceTestImage.cpp index 8fc6f12..205460a 100644 --- a/tests/InferenceTestImage.cpp +++ b/tests/InferenceTestImage.cpp @@ -222,3 +222,34 @@ std::vector GetImageDataInArmNnLayoutAsFloatsSubtractingMean(ImageChannel return value - mean[channelIndex]; }); } + +std::vector GetImageDataAsNormalizedFloats(ImageChannelLayout layout, + const InferenceTestImage& image) +{ + std::vector imageData; + const unsigned int h = image.GetHeight(); + const unsigned int w = image.GetWidth(); + + const unsigned int rDstIndex = GetImageChannelIndex(layout, ImageChannel::R); + const unsigned int gDstIndex = GetImageChannelIndex(layout, ImageChannel::G); + const unsigned int bDstIndex = GetImageChannelIndex(layout, ImageChannel::B); + + imageData.resize(h * w * 3); + unsigned int offset = 0; + + for (unsigned int j = 0; j < h; ++j) + { + for (unsigned int i = 0; i < w; ++i) + { + uint8_t r, g, b; + std::tie(r, g, b) = image.GetPixelAs3Channels(i, j); + + imageData[offset+rDstIndex] = float(r) / 255.0f; + imageData[offset+gDstIndex] = float(g) / 255.0f; + imageData[offset+bDstIndex] = float(b) / 255.0f; + offset += 3; + } + } + + return imageData; +} \ No newline at end of file diff --git a/tests/InferenceTestImage.hpp b/tests/InferenceTestImage.hpp index 45dd8bf..34403c0 100644 --- a/tests/InferenceTestImage.hpp +++ b/tests/InferenceTestImage.hpp @@ -119,3 +119,9 @@ std::vector GetImageDataInArmNnLayoutAsNormalizedFloats(ImageChannelLayou std::vector GetImageDataInArmNnLayoutAsFloatsSubtractingMean(ImageChannelLayout layout, const InferenceTestImage& image, const std::array& mean); + +// Reads the contents of an inference test image as 3-channel pixels and returns the image data as normalized float +// values. The returned image stay in the original order (HWC) order. The C order may be changed according to the +// supplied layout value. 
+std::vector GetImageDataAsNormalizedFloats(ImageChannelLayout layout, + const InferenceTestImage& image); diff --git a/tests/MobileNetDatabase.cpp b/tests/MobileNetDatabase.cpp new file mode 100644 index 0000000..66f297c --- /dev/null +++ b/tests/MobileNetDatabase.cpp @@ -0,0 +1,133 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// +#include "InferenceTestImage.hpp" +#include "MobileNetDatabase.hpp" + +#include +#include +#include + +#include +#include +#include + +namespace +{ + +inline float Lerp(float a, float b, float w) +{ + return w * b + (1.f - w) * a; +} + +inline void PutData(std::vector & data, + const unsigned int width, + const unsigned int x, + const unsigned int y, + const unsigned int c, + float value) +{ + data[(3*((y*width)+x)) + c] = value; +} + +std::vector +ResizeBilinearAndNormalize(const InferenceTestImage & image, + const unsigned int outputWidth, + const unsigned int outputHeight) +{ + std::vector out; + out.resize(outputWidth * outputHeight * 3); + + // We follow the definition of TensorFlow and AndroidNN: The top-left corner of a texel in the output + // image is projected into the input image to figure out the interpolants and weights. Note that this + // will yield different results than if projecting the centre of output texels. 
+ + const unsigned int inputWidth = image.GetWidth(); + const unsigned int inputHeight = image.GetHeight(); + + // How much to scale pixel coordinates in the output image to get the corresponding pixel coordinates + // in the input image + const float scaleY = boost::numeric_cast(inputHeight) / boost::numeric_cast(outputHeight); + const float scaleX = boost::numeric_cast(inputWidth) / boost::numeric_cast(outputWidth); + + uint8_t rgb_x0y0[3]; + uint8_t rgb_x1y0[3]; + uint8_t rgb_x0y1[3]; + uint8_t rgb_x1y1[3]; + + for (unsigned int y = 0; y < outputHeight; ++y) + { + // Corresponding real-valued height coordinate in input image + const float iy = boost::numeric_cast(y) * scaleY; + + // Discrete height coordinate of top-left texel (in the 2x2 texel area used for interpolation) + const float fiy = floorf(iy); + const unsigned int y0 = boost::numeric_cast(fiy); + + // Interpolation weight (range [0,1]) + const float yw = iy - fiy; + + for (unsigned int x = 0; x < outputWidth; ++x) + { + // Real-valued and discrete width coordinates in input image + const float ix = boost::numeric_cast(x) * scaleX; + const float fix = floorf(ix); + const unsigned int x0 = boost::numeric_cast(fix); + + // Interpolation weight (range [0,1]) + const float xw = ix - fix; + + // Discrete width/height coordinates of texels below and to the right of (x0, y0) + const unsigned int x1 = std::min(x0 + 1, inputWidth - 1u); + const unsigned int y1 = std::min(y0 + 1, inputHeight - 1u); + + std::tie(rgb_x0y0[0], rgb_x0y0[1], rgb_x0y0[2]) = image.GetPixelAs3Channels(x0, y0); + std::tie(rgb_x1y0[0], rgb_x1y0[1], rgb_x1y0[2]) = image.GetPixelAs3Channels(x1, y0); + std::tie(rgb_x0y1[0], rgb_x0y1[1], rgb_x0y1[2]) = image.GetPixelAs3Channels(x0, y1); + std::tie(rgb_x1y1[0], rgb_x1y1[1], rgb_x1y1[2]) = image.GetPixelAs3Channels(x1, y1); + + for (unsigned c=0; c<3; ++c) + { + const float ly0 = Lerp(float(rgb_x0y0[c]), float(rgb_x1y0[c]), xw); + const float ly1 = Lerp(float(rgb_x0y1[c]), float(rgb_x1y1[c]), 
xw); + const float l = Lerp(ly0, ly1, yw); + PutData(out, outputWidth, x, y, c, l/255.0f); + } + } + } + + return out; +} + +} // end of anonymous namespace + + +MobileNetDatabase::MobileNetDatabase(const std::string& binaryFileDirectory, + unsigned int width, + unsigned int height, + const std::vector& imageSet) +: m_BinaryDirectory(binaryFileDirectory) +, m_Height(height) +, m_Width(width) +, m_ImageSet(imageSet) +{ +} + +std::unique_ptr +MobileNetDatabase::GetTestCaseData(unsigned int testCaseId) +{ + testCaseId = testCaseId % boost::numeric_cast(m_ImageSet.size()); + const ImageSet& imageSet = m_ImageSet[testCaseId]; + const std::string fullPath = m_BinaryDirectory + imageSet.first; + + InferenceTestImage image(fullPath.c_str()); + + // this ResizeBilinear result is closer to the tensorflow one than STB. + // there is still some difference though, but the inference results are + // similar to tensorflow for MobileNet + std::vector resized(ResizeBilinearAndNormalize(image, m_Width, m_Height)); + + const unsigned int label = imageSet.second; + return std::make_unique(label, std::move(resized)); +} diff --git a/tests/MobileNetDatabase.hpp b/tests/MobileNetDatabase.hpp new file mode 100644 index 0000000..eb34260 --- /dev/null +++ b/tests/MobileNetDatabase.hpp @@ -0,0 +1,36 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. 
+// +#pragma once + +#include "ClassifierTestCaseData.hpp" + +#include +#include +#include +#include + +using ImageSet = std::pair; + +class MobileNetDatabase +{ +public: + using TTestCaseData = ClassifierTestCaseData; + + explicit MobileNetDatabase(const std::string& binaryFileDirectory, + unsigned int width, + unsigned int height, + const std::vector& imageSet); + + std::unique_ptr GetTestCaseData(unsigned int testCaseId); + +private: + unsigned int GetNumImageElements() const { return 3 * m_Width * m_Height; } + unsigned int GetNumImageBytes() const { return 4 * GetNumImageElements(); } + + std::string m_BinaryDirectory; + unsigned int m_Height; + unsigned int m_Width; + const std::vector m_ImageSet; +}; \ No newline at end of file diff --git a/tests/TfCifar10-Armnn/TfCifar10-Armnn.cpp b/tests/TfCifar10-Armnn/TfCifar10-Armnn.cpp new file mode 100644 index 0000000..0d9e16a --- /dev/null +++ b/tests/TfCifar10-Armnn/TfCifar10-Armnn.cpp @@ -0,0 +1,17 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. 
+// +#include "../InferenceTest.hpp" +#include "../Cifar10Database.hpp" +#include "armnnTfParser/ITfParser.hpp" + +int main(int argc, char* argv[]) +{ + armnn::TensorShape inputTensorShape({ 1, 32, 32, 3 }); + return armnn::test::ClassifierInferenceTestMain( + argc, argv, "cifar10_tf.prototxt", false, + "data", "prob", { 0, 1, 2, 4, 7 }, + [](const char* dataDir) { return Cifar10Database(dataDir, true); }, + &inputTensorShape); +} diff --git a/tests/TfCifar10-Armnn/cifar10_tf.prototxt b/tests/TfCifar10-Armnn/cifar10_tf.prototxt deleted file mode 100644 index 9abf454..0000000 --- a/tests/TfCifar10-Armnn/cifar10_tf.prototxt +++ /dev/null @@ -1,915 +0,0 @@ -node { - name: "data" - op: "Placeholder" - attr { - key: "dtype" - value { - type: DT_FLOAT - } - } - attr { - key: "shape" - value { - shape { - dim { - size: 1 - } - dim { - size: 32 - } - dim { - size: 32 - } - dim { - size: 3 - } - } - } - } -} -node { - name: "conv1/weights" - op: "Const" - attr { - key: "dtype" - value { - type: DT_FLOAT - } - } - attr { - key: "value" - value { - tensor { - dtype: DT_FLOAT - tensor_shape { - dim { - size: 5 - } - dim { - size: 5 - } - dim { - size: 3 - } - dim { - size: 32 - } - } - tensor_content: "2\033\304\274\307\254<\275\016\354\253=\2520\002=\330\017\005\276\033\203\021\275\300AF=%\310\004\276ft\206\275\250\374;\275\253\301\214\2754\274\275\274TS\370\27487p=`\200\337:h\014\351\275\031\255D\275\375\2461\275\010\210\246\274_\330\266\274\373\373\222=\265\025\242\274\333(d\275\002\255\222\275k\210\221\274=\203\234\275=\301\264=t5\206=\003\023\033> 
\303\312~\261#=\307\2303\275;\221\244\274\206t\221;\024[\n\275\2724\301\274\260\002\"=\371\035\022\275M\001e<\263\302\200\275:\224`\275\304K\247\274\247\352u<\177\266\277=\255~\220<\237W\001\276\317\3314\275\027E\002M\0214\275\202\375\376\275\237\221\254\275n!\023\276/\270\022\276\213\214\362\274I\247\330<\300\337\237\275\206\304\205;\217l\300=\357\021\234\273\004-\335=\224_\316\275\304\254\313\275m|t\275\037\220\221\275\3250\311\275\251\356\007>\272\253\206>\337\326\020=m\327\260\275\331O\221\275\355P\362\275g2\267\273\202\201F=\277)v\275%\320\020\275\303\210\246<\264k\251\275]\026W=\275\320\215;\340\016G=\200j\004;~;x\273\346\221e=\234\350<\274\220\302\225\275 \202\371<\335\316\256\275\337sl\275Y\366\264\275\243\025\'\275\025-\316=\202\003\262=\271\371\342\274\203\264R=]\255\020\274\344\215o\274\307m\324=\034\033\';5\246\333\275\267\264\225=\345\346\306=1{r\274\257L\014\276F\236:=\023)\317\2739\373<\275\244Zw\356\267\226\2751\'\204\275i\315C\275\352i\270<\234I\007\275\320\307\030>\210<\213<\002\253\274=\232%\212<\376\266\326\274\201$\372=g\2334<+\311\215\275\307\236\241;F\223\221\275<\202\036\275\230\253\351\275\3530\003:\223\0034>Z\210\267\2756\016\000\276R\177\315\275\027!c=\036A\026\276\274\253\213<\324d\223=g\025\355\275\267\016\240<\372\003\211\275\326}G\275\236\0362>\013U\241\275\010\245\306\275x\0161\275\024\352A>f\277\257\275q\223\261=\t\377\005\274\214\214\252=2J\341=%\315\254\275\220\t\305\274I\243\226=w*\226\275\340\215=>\216\315\017<#9?\275\330\036\212\275|\303\271\275e\345m\273$\244a=\3162\272\275\234\360&\275\233p\245\340\347\007\2758o7=,\243r=\031x\251=^\031\017=\376\365\276\275\350\nA\274\301d\255=\201\243d\275V\232\315\275\\\353\364<\210\265\024>\001\275\\\272n\255\355\275\227\224a=\003\'\026\275]\317\303\275\201\260\265\310\200\367<\023\354\014\275\242\262\341=\031\310\371=3X\326=ns\007\275\227i\300\327\314-<\273\306r\275 T\225\275o\340\231=\377\233\300\275w\345\250<*>\332=5\355\r\276\304\265\004\274q_\216=\217i 
\275[d\027>6P\223:u\237\204\274\272O\325\273\234\265\2753\303G<\032?\223<\234\363\315\274\271@d\275\006_J\274\351~N<\275\337U=Z\273L\274\314$b\275\344u\370<\264dK\274\215\216\270\273.f\240\275\177\267\237=\310\250\350=>\303\202=\315#\177\275\2241\220\275\003\003 \250n\244=\032\302^\275\352\235n\274dp\252\275\352\034\252\275\307\177\203\274`l\263\275w\021&\275A\242\341=\305\225\362;\277vV\275\327\311\267\274m\221x1\220:g\322\'\274\271\354E\275ln\361\274\320\376\326\274c\301$\275|\004c=&F\r>\332\'\024\275U\005\230<\244\216\007\276\350j\035\274_\014\215<\335\242\234=\335\276\035;\222\262}<\251 \346=\267\267\265\275-}\213\275\227s\031\276\263\307(\276\001\024W\275,\340\217\275<\351\334\275,\037\206\273\272\377\324\275\235\366\007=\341~\200\275\247;\001\276\275\273\020>\023\367\316\275t\350><\334b\230\273\032\217$>\244\247\221\275\t\231$\275cu\034=\3276\025\274+N\033\276\376\366\315=\226w8>\000+\025>\244Ah=Z\201F\275\362F\021\275\212\244#>\265\201 \274\314{=\274\253\252\021>t\033\032\2752wL=\024\004\230\275\240\314\323;\351\223M\274\242\226\201\275\014\354>\275\337\223\325:\037+\220\275\351\224\350<|X+\275\034\177\367<\321}\001>A\234\251\275\222\253]=b;\371<\240\320\001>\313\313\010\275M\035\242\275\365\303:=\002\3364=6\003=\276\244\211\234=\374\3525>\202\264\265\275\354\342\325=\325\340\003>\032A\030=\333\016\252=\355\2141=`\371\235O\233\304\275\033\210p=`\246C=r\000\217\275\334\032\003=\212O\032\275\016b\356=\035\253\271<\007\242i\276/v\252=\246\314\">:\010g\275\265\376\022=\204\204y\2754^\"\275g4J=/\020\014=\211\215(=\226V\037\273\001\212\344\2756u\337\275\3170\r=\2266Q\276SVF=\316\223\377\275\330\236\361\275\334\314\002\276\007\246t>(\355\371\274\361\253X=\352\256\331\275J~y>\357\274\217\275\254\301i\276\245\232\262\275W\367\370=L\336\021\275\177\271\007\273\217\324T>d]\026;\r\317\325\275\272\303\027>\225j\205=\375W9>\212\212R=#~\333\275Z}\225\275\371\345\024>\347D\242<\032,\325\274`\357\311\274\352\247s\275\363\032\016=^P%<\223L\025;@@\274=S\371\352\275&\346U\275\32
2\344\336\275\305\326l>4`\310\274\re\276\275\001\336#=\376\356~>\261Pc\275\016\303A\276S9\001\275G\255\215=\025\375R=\211\327\227\273\nR,>Drm\271\343\320\342\275z\033\366=\243\302\365\201\255>P=\351\233\r\275\330W\003>\210\304|>\017\"\204\275bl \276CW\216<\207\031f\275\033\331\353=\234O\331<$\307\201>62F\007dG\276\246\217s=\220M\203\275\326h\003\276$v\211\275\267\314\216\276\326\271\210\275\376\000\022>\264\013N\275\253\302{>\240\222\275<\232w\260\275\231\235\216\275\\{U<\331\t\006\275Y32=\000r\035\275\373\014R\276\0276^\273,\371\'>,\231\210\275\330\343.>b\000\222\n\277+<\3704s\273\214\325X\276\023\355\223\275Bu\222\207\231\t\274\332b\305=\033\030N\275\311\273\224\275\021\354\206\275<\372\253\276?\261E\275\222\030\r\276\276\257\371;\340[\210>\240VD=\000\027\027\276\261S\276\274p\266n\274\343\303\203=G\004\244=\031s(\276Z\037B\276BD\314:\204M\017>\332\345\212\275h\271\362\275f\217\371<\344Yb>L1\201<\022;\340=b\n\230=.\204\234\274\263*C\276\355$\001\274\017\376\203=\303\321?>/ \322=\374f\346=z>\004m\347 \327\257\027\275o\"^\275\"\203f\2759_\003\276\3000%\275\013-C=}\000\r=Q\215\002>\335No=\305O\265\2758\352\341\275c}[\275W\331\t\276\355\200\027\275C\277\251;aA\244\274\346\325P=\036}I>\246\"\207\275\003\2146>\220fO=\010\273\032>\366f\255=)+\306>\243\000\257\275\340r\036\275}g-\275\234\322\342\274\235\360\350\275G\304w>s\t\021>\360k\025>eA\353\275|\373\260=\205\357\237<\250?\277\275_m\314\275Y\3319>\331\030\007<\200X\206<\272~V=\356\374\177\275\265\375\323\204\324?\275a\310\303\275\354\310\034\275\026\003%>-^\365=po\247>]e=\275\233\205\331\274\001e\305<\"TU;\320cV\276\340\363o>z0!>\316\354\337=\347t\t\276\352\272\241\275\270\342\232<\361\2425>+\351F\273X\273\273=\361\341{=\306\214R\275\236\337\017=\362\2057\305\366y=@\020\017\2762\365+\275W{B>\361\"c=W\353\250>,\344\260<\204t\022\274\235\035|=\230\240\220\364]M>\301\027\271=\355 
\373\275\025r\036\274\266K\230\275D\204\024\276\023i\"\275\265\327`\235\366\276:\333\343\036=\025+=\275\220\020\200\275\253N\273\275h\356\331\274\314\217)\275[:\205\274\205\021\213\275P0\266=\257\037\314=\223\371y\275\237P%<\234\307\255\274\367\321a\275\265\372\255\275\224\375\t>o\027o\275\017O\270\273v\243\265=\370\234\210\275i\304,=\335\0025\275\336\351r\274\363\376\307\274\351\225\013>q\375\215=e\232~=3g\030\275\246[L\275 \332\005\275\344\367\r\275\317\t\231\274>\235\234=\022\355b\275\241q,\275\262\010]\275\366\3009\274\222\272\233\274\272 \225\274S\341\344\275\346\366\016>\303\242\260=\003\346\247\275\371k\243\274\365\3006\275\005\322\367=\322\030`<@\342\220=N\206\023\274\014G\240\275k\265\234=c\217\3479EeM=\374\001?=\354\276-=\210f\245\275\007P\362=X2\006=\036\003\240=\025>\254\275n\2649={n\345\275po\343\275\303K\350;\3001m\272\261\212\302\275\227\273\227<\314\237\247kp\265=\014\017\261\275\275\246\306<\3409\005\276\303\234w\275\177V\215\274b\346\2708\022-\235<\254\307/\275^\200\337=\025E\222=S\035\333\275\354\3143\274\204\301\262\275O8p<\375\023\252<\373&\275\275\214\213.>\275\355\375\275\033\237\374<\340\330\003\27685\n\276y\016\372\275{\025\205\275\247\010\313=\361y\026=\006\214/>\003\217\325\275\375\014m\275r\256(>9\227N<\330_\034\276\365\216W\275\235#!>\206\244\301=^\024|<\221\220G\275386\274\221\030\252=(\032\224;h\377$\275\340\362\020>\303;\027=\232\235\001=C\363\026:y\353\361\274cc\225=7\347#<,\363\032\275\274\307\025>te\215\275H\263#=\002\226\351;\253\244A=X\301\034\276\233\224\020\275G+\010>\013(A=\325n\372=4x\256\275E\225\245\275\031o\367=D\260\252=2\357/\276b\2525\275\014\031#>\0229\224\275\004I\224=`A\r>\241\321\032=\2416b=\253[R=\304\024\200=i\260\264=\'\342\221=\027\006\213=\010!\245\275\017\344\323;\254\265\221=z\010\031\275\224z\351\275\253\224\017>;\211\006\276~\021\273=\026K\026>^H\032>[5\361\275\235\332\202\275n\375\260=-\264\372<\204?<\275\3024\225\274\325\321)\275\241\247g>\237\017I=B\026i\276qB\353\274K\311\021>\236\222\253\274$\336\203 
u\357\275P\347\221=\033R\332\275\211E]\275P\240\253<\267\260\225>\267\315\004\275\351\363e\275\263W\352\275\362\3776\276\004S\365\275\301.\250=\000\r\365=\300:\231<\027)\366\275\300\034X\275\017\261\375=\334\270\276H(u\275z\272\000>\025/\346\2750\363w\274_8\255<3\242\244>\374\320l\274\322k\333\274\013\312\t=\362\267T\276P\341a={d\014>P>Z;\353\325\016<_fO\273N\256\371;S\207\r\276\220\275\356;\244\236\364\275\370\374F\275?1\244=\350\345\333\275\344\031M=\026\221<>D\r\025<\324\025\266={\373\212=-\212H=\250\021\037=\022c\357=m\202\323=\276\204u>\271\371\301\273\373\343\010>d{\252\275\020\333\276\275\267\332\375\272\004\251\221>>\320\224=\242\r\301=Q\002\375=\266J]\276H\023\274<2\206\372=\330k\314<\323\2229\275\211\237\202=\362M(=\nF*\2755\270\315\273\032|\025\276&XY\275\007\200\375=ZG5\275yh_:\254\331\002\276\324\367h\275!\006\255<\317\205\016=c{\033\275o>\227\276\226W\025>\3633\340\275-\002\255\275\236\352\330\275e\250\224\275\203\255 \276\326m\006;\326FP\2766\224\274\276\3367i\275\343R\237\375\201@<~\005\271\275\312\237\224\276\255<\267=\261?\037=:r\024\276\264C\201\275\026\024u\274\023z$\276\013\315\254<\325/]\276\002\240\307\276\225\263\n\275\330\306\224\275-\222F\274\332|\214\276\346\263\326=5\313\312\276\370U\371;OB\201\275UB*=\320y\367=-N\276\276\2670l\276.\245n<\271\037\313\274~\233\230\274\025\302\303\275E7\355<,\211\203>e\260!<\322\221\266=\326ca=Y\337\233\256f\333=\363\003\257\275\270\224\271:X\334\267\274(M\251\275\021\365N\275\216\275q\276\253\265\240\276W\272\216=$v\217<](\004=C\322\216\276\343\022q=\010~\257\276z\212Q=\212\354P\272\"\235\270=\320\225\022>\271\331\234\276\312\r`\276\\\2628=\310\230 
\275\261\034\013=\223\215\311\274\240\277<\275\355\016\372\275\247\273@\275r\272\210<\025\224k=\2562\031=H\261\334<\032\374\371=\013\260\001\276\261\343Y\276\366r\254\275\217\017\350\275U\346\"\276\301F\351=\036\362)\276\2611\203>R\025g\275\212\354\261=^\035\267=\223\225<\276\006\021N=&\231\202\274\364\275U\275\312\004\006\276~9P\275<\271\303\273\247\267\363\032\245\006>\312\322\n=Y\361\001\276\022\202\265<\260\342J<\315\274\344\275\377\237\275\275n\315\023>\212\241\273\t\027\031\275b\230c\275~\266a\275\377\346f\276b\036\311=\271\3174\275\266\307\224\274\323#\231\275\027)\321\227v\'>\020\016\214\274N\331\023\276\273\246\227\275\210\030D<\245$U>+7\3529\0073\217=\021\207i=H\313i\252\033\215=v\331\345\275\256PA<\314\005\367\275\373\370\206\275\221\277a=0 `\276@\335{>l|\200=4ny\275B\223\261\275\024db\276Z\217\'=\017\242P\274\231r\026=x\325\277<\212\357\210=\352\334]=\337\343\333\274\201\257\243>\360MM>/\253N\275l\013\370\275\314)\204<\274\352\212\275\255\251\n\276\234M\000\275\342\216\351;\312\315\026\274a<\021=k\205\317=\265\235y=\351\017\201\275fJ\316<\336\016\033\275As\221\272_\226\200\275\037\000\377=\340u\317\274\237\246\301\275i\002\200\275`H\257=\3327\017>\320w\255\275\336`2\274\017\246\">~\027;\275\324\363\350\275(\216\340\274\202\226\320\274/\314\315=\235\261\242\275\355\321\220=8 3=\305.\324\275o\000A\275\334h\307\274\006n\206\275\344I\352\275X\361\330=\315\322\'\275/\027V\274\203\246\257=\270z2\273\rv\320I\t\326\274\316;*\275\250\236\001<\214\337\335;T\241\367=fw\374\275:\002\001>\210\027\270<\274\220\005\276\262\023\346\274h\212_\275;\233 
>S\016\232\274\357Pw=\033V\202\274\256\031\026\274\347-\254=\330du=\355\214$=Q\256\023>\000t\004<\344d\236\275\024Y\302\340t\n=\251\037\213=\240\033\227<\362\210@\2743\326\255=\221\2551\276r\257\007>\316I\037;:\225\364\275\001\317T=\235\003\377\275\324\241n\275\211h\010\275~L\232\275\261\250\320<~\354\325\275\006Q\345=X\327D=\372M\374\275?s\265=\267\205,>6\305\301;N\035\333=?\363\200<\335\330\230=\350\364?\275\014\213\325\274\013]\007\276\363d\375\275\026\275\202<\033k\225\273\341}\266\275v\246\200=\002\332\324=\314v\254\275j\222\257\275\353\336\323\274\201g\\\273\023c\300\275\0236\027\276\205\210\305=\320J\370<\205\370;\216dd\274\327$!<\243r\326<\256\006%=\007S\251=\267t\310=\025\234\025=u\317\214<\337\003\344\274G\300K\274\026\311\224=\363\323\000=\326\007\266\273`\210p<\202\246\305\275\254,D=#\013\243=s,\277\275\254\201\n\276-;\224\275\270\243\227=\264\032\347\275\233\016\350\275\352\343\267=\257\347\320\274I\210\274=K^\267=\322n\204=\004\001\006\275\236a\215=N\360\214=\344\250\233=\222\351\002\2742\003K=\317\245\241\273\t\031\r\275\230\014\223=\235v\361;&\276\251\273E\377-=\220o\253\275u\0303=y\2308>\034\004\034>\364\312\251\272\224\347\222\274\210\241\"\276\374\025\014\274)\212=\275\307R\000\275\371\230\260\275\010\224%\274r\n\020=N\315\036\276\304\315\312\275)0\216=\232\210t<\316\010\351<\352A\341\274\t\004\306<\032^\366\275>!\214<\223\202\273\275fr1=xM4=0N\007\276\375v\260:\236\362M>n\324k\275V\r\263=bx\264=\331E\213>j\024H>\034\032r\275$\036\000\276\270^\252\275q\340F=\251\370\201=H\025\214>^\233e<\326O\244\275\263\317\227\275\271\257\024\275+\372t\276\357\246\252\273\270Gr\275\276\027\035\276`\311\222=\374\003B=E\233\307;E8\337\275\225(\374\256yK>i\326!\275\371Y\207=\316`\204<\346;G=7W\273=\006$\233>S6z<\3071t\275\032\3176\275\306x\204\275@j\217\276\3713\242;\376\250C\275\037\242\n\276\376\321q=\374\205_\275\305we=\302\370\016>\237C\027=\333\255\255\2759\222]=\006g\273=,6\335<\031\317\206\274yG\234=\350\232\240\274\272\3272\275I\254V\272\204\305rJ\2566>4\23
7;=5\320?>i\216\341=9\222\022\273\203J\206=\257#r>r\255\031\273\245Wl\275\312K\302\205<\022\276\235SY;G\300\251=\035\253]>\347\227m\276\026\202\\\275\276\270\276\275f\007\310| \213\276\354\204\026\275\210\"\021=\312\267\023\275\316\201\303=\332\363\226=Q\276\247=&\315\357;\336\347\017\276#\257\216<\342\273o=\316\211L=\341\020/\276\224\211\362<\243@)\276$\t\013\273\226\254$\275\344K#=\343\214J>;\213\370<\260\223\352\275\301\037-=Z$\206=b\tM\276\211?\002\275\366~\314=\215\326\034\276B=\017\275\223\350\341\2756\322u\274\2740>=[\310+>\036IO\276\261\306|=Ii)>w\364<\274E\035\375\251\3101>\244\330\300\274=\251\250\2750\232\315=\344l@=\235\274?>\254o\026\275\343\331)\275UP\377=\244x\340=\035\355\255\274\330\250\205\276\253\236t\275G\t\035\276lB\267\274]5\354\274\251\262t>\023\222u>%il=\tI\372\275\364\322\212\275\266\271\036\275\3352v<9\346\351\275)\373U\273;\271\221\275\315W\033=\204\272\027\275n\200\242\274\305\270#\275\301\341{<\251|\372=\235?\335<\343\007\211;\250\242\250\275\254\351i=\225\274\032\274?G5>\351\277\360\2749\374G<\005\324\221\275\036\016\324=5\000\371<\tN\207\276^\204\374\274\264x\217\275N\221\351Z\327\201>\225,\305=\027\204\034\276\304\345\251\275\212\004\321\274Y\346\304<\350\342\'>V0\351<\305\224\265\275\300\351I=\362\376{=\263X\036;\\c@\274\303f\241=F\036\\>a\242\026\275\234\350\230\275\325\237\005\275\031\005\243=\035\372\034\275\001r1>\020{\203=n\212\177=\203\255\363\275\260w5=x\371%\273\271\354x\276A!\006<\247\002\006=\344\034\207=Q6\374;/\005r>1\300p>\010G\000>\357T;\276\271\232b\275Q\364M=\215\274W\275\360\370\347\275{5\315;\353{\240\275@*\211<\305\357\314\274=\241\333=\004\211b=9\2026\275-\220\024\273\334\220\000>\303md=}E\350\275\375\317\227=\3136\301\275J2\223\275\335\304\261\274h\017e\272\026\202\027>\215\215h=\277\326\212\275\013a\347;-\363C\275\002\327\004\275\213c\221\274\235\"\207\275w`\267\273w\314\205\275\177\210:=\325\254\233\275G\203\236\275;\035\267\275N\177\342\274\2508\203\275Ge\200\275\020\254\237\275\225c\224\273m\020k\274\260\250\
371=\324\021\364\274?v\t=\251\032m\274\003\330\305;\316\365^=\246?\217\275\346\263O=ya\010\276\355T\355\274\237\342\344\274(\215\024=A\004!\275\300\017\230=+\t\265\273\364yZ=\000V\345\274\rG\252\002\251J\274\010\360\216\275IY\211\273\277\260\035=\361\356\371=?\306%:\31150=\366\355\273=\262\340\006\275\026\312Z\275\024\'@\274\274\350\246=\036\032\362\275\310u\237\275A\311I=%\214|\274[\031\023\276\325\2411=\315\320\033\275\263Ki=\201\331\245<\322n\317=\021\213\275<\377R\207\275\003H\240<\rW\r\276\\\324\257=\342N\315\275\234k\257\275W7\233=\351\347\325\275\300u8\275w\261\225\274>\233\323<\322\354\002=\344j{\275u.\200=\342f\301<\244v\017\276\275\336/\274Z\214\021\275j\266\267\275\262\022;\275\376\342\211\273\224\201\n\301\352\325\275\204\327\266=%\217^=\215\371Q\2753\007\232=\262\017~\275\375.\356\275=\3330\274t\014,<;\317\007=>\220\253\275sh\237=}\303\376\275w\337\350\274v\360\034=u\270T=\366!\273\273\'^wX>=\22260=E\226J;`O\241\274~\322\260=\356G\201:\276\265\236\274K\302\213=w\362\231\273\264\254\034\276\325x\303\275F\326\276\274q\351\233=\243\025\305\274C\247\207=\316\003\271<\021\370\320=r\275\253<\212\372\264\214|=|_\256\274\235\330\030=-\273\t\275=\2443=\315h\025=d\205J<\233\202\031\276,\0148\275]r\000=\315\327\242\275a\322\030=@{9>3\302\356\274g\202\256=\356}\323;\226\214\227=0f\304\275\221\276\200\275\224\030\321\275Rk\252=\264\033\326\273\225\200@\275\376\3251>|\212D=\260\024\003\276\332s\312\274\207\237\333=@ 
\037>K\273\224\275q\364\252\273\337g\210\274\256y\250;\035f\004;W\346[\274\326-\343\274L\345}=x\036\275\274\236\205\244;M\267\224\275<_\233\275\016\345f\273\222\026d<\346{\207<\200\0172\2757\306`<2\2709=\036\221\200\274]\236:=\r*\000\2763\022t\275\224\0016=N\370/\275\2444i=\355\014\315;jdR>\253k\t=\326\",\275\030\0218\274\303\227\253=7\315\r>\331\201\247\275\267Q\347;6\323T\274\314b\223\272\273\2359\231E\025\276\355^\211\275\365\346\331\272\276\355Z=;O\274=\203-\257\275\365\024\341(\311\237\275?\213\202;\026\210\364\273+\213\031>\372\202\r=\371\212\250=\332\n\005=\003k\357=\363&\r>\232\325\244\275\225\241&<\030*\031\275\330iG=\267\363X\274\t\366\364\274D\237\230<0\340m=)\260\337<\2013\315\275HXs\275\313\356d=\014r\232\275\223\320\364<\300\363\032>>n\177=<\275\376<\341&/=\371Z\020>\362\207\263=!;:\275X\021\255\275\347\330#>?U\204\274\236\346\276\275\205_\224\275\211\230\310\273\3751\336\274\004\353i\273\216q\023=\27598<\270\021\344=,\037\243\274\024Y\213\275e1\317\274\222\264\212\275\036\275<;\213g\\\275W\274]=\324\330Y\272O\2751<\323\026&\275\340\001b\2756\355J<5\344\344;y\272\017<\251\033\021\275%\255\225=(\334X<\215\364\250<\027\253\247=>Z\225=\357\331E\275z\317J\372\352<\350Q\024>\246\343:=\310\236g=^\307\313\273\331\260\002>\360c(;\242\355\276\275\235\205\226;\273\253\'=\234\007\000\275u\335\206\275\305@\325<\277|h<\335!\271\273/\354}\275\202\217\231;D\300\217=#\272\221\274\232\005\324\275\304\210\t>\222\3520=.\243\020\275J\222e<\355[]=,\220\037\275x\n\"\275\342!s\274\367\366\021>t\3555\275\340I\254\275Qn\026\275_\356\217\274\252\352R8\0108\320;\324\360\351\274Zn\361\2759uS\274\256\233\253<\373!\302\275\301-w\275ky\261\275\201\036-\275\272\002\364\273\225\211\354<\361L\022\275\327j\242\274\332\201j\274P\361\014=}~\200:\177\016\344<\023\007\272\275\324\237\177\274\240\375(=\"s\226\274\271L\205<\326\323#\272\341\035\306;\000\333j\275G\263\030=\372\261\001\275G\273\360<\360B\377\274\305=\266\273}\305 
\274\256\021\334<\t\340\007<\300$9\275\344\033\211\274*\324\360\274\265\221D=\036\r\207\275a\033\222\275!\031==\210\303\353\274\212\232v=b\261\036=0\333?\274\374\020\247\275\216KA=gwt=\312E\236\274\216\034\007=\350\2431\275\206\354\300\275\370^\214\275x\223\003=\357\217\203==\224\270;~\036\022\275\227\350b<\334k0<\252\333\034\2765\224\220\272#\216U\2752W\313q\274_\014\300\274\210\277\320\273\t\027\261\274f\211\306\273j[\312\273s.8\274a\365\253\273(T\230;\325]\217<\240\326\252\273[\027#\274[\360g\273\326\326%\322\224=Bo|\275g\344\263\274\267M\316<\005V\235\274\226l\003\273\001\033%\275h-^\275\372\304\377<\314>O<\033\360\210<)-\313\275\267\350\336\274\321\227\3429\246\253\354\272\355\373Z\275\'S\016=\200!\266\274&\247\233t\235\274\274#\322<\356\230c\274\345#}<\023\263,<(\305\014\2758\030-:\323}\214\274\344H\363<9k3<}D\'\275\310\334\273<\033\035g<\311\355c<\306\364\306\274\000s\327;\233\220\254<\350\333\001\275\204\370\312\273\365\343\313\273\300\355\026\274\313\273\235\273W\031I<\337\362\217\274\364<5=\233\261\006;\253J\036\275\322\032\313<\032\033\212\275\224+\263<\016\243\007\275E3\304\274\t>l=\361\377\331\274\242T#\273\370t\334\274D\215\237\274\'-\036=s\274r\275XD\202\275\260n\240;\264\005\211\273\215\352&\275%\223b<\264\232\215<\240\321D<\235s[\274%\234\331<\274\0239\274\337\036\332\274x\005\240\273s\334\222<\024qP\274\346\017\177\273\036\001\266\274\017\021G\274\313=\367\274\351J\030\274\242\252\347\274\221c\035\2740XN=5d\017\275\230\250\002=q}\035\275\234\026:<;\246P\275\3479\363\273\2125\324\274\017\342\033\273\033\030\203<$Ua\273l\267\216<\217\250K<\000v75\254\367#\274E\3243\274Dj\245<)\201\037;\315\245\217\274W\016\226\273\323\r\204\273\275\323a\274,\366\317<\324\235\010=\300\332I<\321\210\246\274-\222\260\274\0315,=!Y\004>\010dc\275EOu\275\225\254\017\275E\007;\274\274f\013=!@b\2754\017\222\275\323\231\037;\357+\213\274\272\005D<{\213k\274\030\206\303\273\222\3539\274K\227\357\272\236:)\275\277i*=\031_\330<\003\\\212\273\354Y\307\272\230Y3\274ec\315\274\037j
\270;e%\201;\365\345\207;E\346C\274\273\216\010=\275ID<@\236\324\2744)\342<+\376\2419\ta\000\275\021\304\277\274\275*\247\274\305)\232;\215F\001<\212\177-\273.\216-=\363\254\"\275\344\315F<\361e5=7\273A\275\364\273\305\274sV\215;i\234\003;\0232+<\312\207\324\274\321\272\t=\177A\031\275\361\2328\274\232\377\310\273$\272\261<9&\346\355-=\002\275\207<\301\256\255\273k\000\335<\247\3311\275dqM=\213o\301;L\r\025=\346\370$\270R\316\031P<+\353\240\274\253r\202\275\023q\355:\260\214$\275\262/\361r;\354\006o=R\370,\274\367\006\210\275\014m\342\274\372\203D\275\276\0330\275\346\323)=^Q\255\273g\034\215\350\006\274\346\220\252<\330l\363\273\020U\032=\323\230\014=\334M\231\274\203q\220\274\356x\200\273\200\023\3449Kf^=\260\345\200\274}\245\267<\246\313\023;\037\262\016=\342\353\n\275mnX\274mY\357\273\033\0316=t~\371\272\351\3727\274\327\250,\272J\002 \2758\023\032=p|\304;\367wW\275\222A\203\274\321w\030<9\254\324\274?\2563;~\031J=$\247\333\274\210?\263=5\330\034\313\274D^\022<\315\014`\274o\316\206;\215R\250\274k\252\006=*\213{<\253A\036\275\037<\242\275\022KI=H\245\021\275#\214\276<<\343\235\274\"Z\307<\303\177\234\273\330v\'=\304-\322;,\314\227=)/\320;\344B\n\274\006\344\377\273\361\276\225\274\306w\237=\221\021\207\275\202,\202\2750\210Z<\200\021f\273\203\302\241\275h\250\370<%\231\303;\337P\373<\225\317\220\274$\262w=\325\301\340\2745\300$=^\014\202\273\337\340&=\032(\252\274\271~\214<\025\333]\274\301W\013\274L_E\275\346\006\206\274\263*\010\275\362\276C\275\331\334_=\204L\315\274I\007\331;\344\005\361<3\005\243\274\375\362\215\275\353i\225;~\034y\274\337\255\313;\320\235B<\265\021\t\274UZ\006=\0004\355<\320\217\330<\350a\260\274\237>\005\275\333\272\314\274A\016\362<\2266@\275E\025\360<\004c\227\274d\261\260<8\363\036=+P\2669_\023\314\274Z\\\276<\345\017u\274\262\024,=\266P 
>\271\356\245\275R\250\260\2749\004\325\274\244\016\313\275rc\230=c\316S\275t\341\253\275a\366\256;\016\373\234\274\244Y\023<\274\014\334\274\003\000\251<\355$\304;\006\3654;\374\347\031\275\022\237>=\216\311D<\372\333\210n\327;\226\201\335\274\016\351f;\347\327\224c\322\2730\0202=_\262\030<\313\211V=n\360\257;~\014\226<\247\301\272\274\354$3\273\277\354\016=\245e\301\275\330<}=\357\3768\275\3014\330\273\226>\332\274\036z\317<6\'w={\305J\275N\000\001=\237\017\235\274\005\377w;P\000V\275\330\3424\275\000\304\343<\030\341u\273`\344\277<\351\317\224\274\213\344\007\275\217\3115\274\233\225\210=\024F\311:\221R\005=y\230-k;\2050\342\274\244\000\035\274\311R\014=3\224\267\273M\234E;\206\034B\273{\371}\275\246>\271\273(\271\376;t\312\003\275_\210\032=\234\335\270\274\"$}\274\211\245\276\274\034Y.\275\216{ <\342\365L=\202\222B\2732\2478\274v\265\032\275U\224\231\274\237\252:\275\263\300e;V\344\356<\207\255\002\275\270h~\273\rJO=\021\201\010;\234)\000<\336M\332\274\274\275\300<\207\020\020\275\364\237\254\274\037\314\264<\343\201\355\274\274\316f\272\320C.\275!j\346\274\030m\357<\243j\243;\361;\375<8n\002\275\205\350\031=\341H\311<\210\275\260\274\375\013\231\274L\351\036\275\017Y\222<\217\322\032\273\033x\337\325\273Q\303\320\273\366$\215<\t\306_\274\370i\357<}\377E<\027\201\354\274\227\224\031\274\023>$<$}\336\274\304v\253\274\211\213\016\274>\370m\275\n\014\\<\236>><\241\000\305<\310x\214;\314\013q\275\334F\250\274\313\215\343;9#\004<\256Co;\014\202\366\274V\252\276<\232\231v=\333\223\261\273\376*\216\275gE\272\274<\222\r\275\254\n`\2740;\337<\037\326W\274x\212\311\271\334NM<\372\"\366\273\001\304\371;ra\362\274\365C\275;\225O\216\275\371i\007\275\221\036\025<\337\030l\274\200\242\226\274D#\022\275L1\n=+A/\274\276\371*\275\233x\333\274\032Z&=\321\267\347\274\272\355\320<\177\373*=U\236\"\274*\213\010\273\017\375L\274\'\004\243<\027\376\021\275|N\211;\024]\026\275P\211\214\274w%A\273A0\031=\235\240\030<\263\212\325\274\353\232]*=\223\177><\223I\354\274?\242\347<\003U\251\2
73\200\264\347\274s\367o\273\032L\201\274\3674\314\274b\220\022\274\366\217\025=\200\251e;\222^\304;~x\215\274\350`\360;: \323\274.\276%<{\326\241<\010 @<\220B\257;\315\251\353\274\241G\361\274\237\352\227=_\016\223\274\302e\253\273\231?*\274K\220{;\260U$\275vLD\275\001\030^\275\335=8;\023\233\234\274\226\304\224\274\367\002\026=\254\347~<\253U)=\350\245\014\275\354T\030=\2678\255=\312\0310<\223-\242\274\202\276q:\320\036\326\273(\021\204<\2772\'\275\223\te<\363Nc<\202\232\220\274\355\245\321<\3475;=+\376\"\2750\356\\\275\'\243\013\274\230\266\232\274v\027J\274\351\006\r\274\340\026x<\"\230n<\023g\200=w\343\356\274f\007:=Zn\215\274\237\263\217=.\222X\275\030\352Q\275\377\031>\253<\230C\365<\334\361X<\361\2304\274\036\341e<\265\274\007\275Ca\267\273\313\276<\274\357\002e<\261\032A;\345c\205\273\264\301\251\274\026\313\311\274\207\251M\274n\313U=g\000z\274\346\210\373\273\006\025\235<;\324\376;\355\317U=Yb\266\274\324R\354\274\203d\237\273HzL=\265;\261\274\376\255\244<\322\201\2330\350<\r\230\t>h\021#\2753:{\275EF_\273\020\325\210\274\207\213\240\272sk&=F\255\214(=Ov\225\274\r:5\2759\320\355\274p_\334\274\226\006\242\2749\362\351<\305N\331<\265\362\255\273\345c\323<\264\334\373<\305c\222<\021I>\2745\010Q\274\231\035\330\274\310\334\002=#\002\317<\376\305}<\270\004\234\274\376\363\305<\352\2675\274U\024\257<\344,\343\273\275\275\371\266\234:\212\332\251\275E\005_=\216o\006=&!\343\274\036\341s:\014?)<(h4=/\215\027=\352Bz\273\347\250\'\275\325\262\213=8U\225\275\364\222\231=(\2205<\333\246\005\273\013J\032\275q\245\352=\026jS<\030\253\373\274F)\352\272U\036\216\274\344b]<\243\2050;+J\r=\030\273\260\274\342U\226\275s\237\355\274Yq\034\275\032\302\027\274\017\004L=\247\356\362\274T\251\203<\306\343\212\273\ns\245<\222:B=\013m~=<\327\256\274P\004\202\273\250\022\367\274\236\372\201=W\316\006\272R\363\243\274\244e\000=\023k\242\274;\343C\274%\311,<\333\300\214\2745\333\r<\305nQ\274\320\370;\2753\021\316\274(\265\251/<\365\345\303\274\377\230x\272\256\364\005\274\354\023\202\
020d<\255\014\237\274\322\202,<\266UL<\221\033T\272\270\226\324\274L?\265\274\006\205#\275\2243I=\347)\204\274\271_\316<7\237\n\274:oe;^N\255\273l\352_\275[\203\231\275IGW\274\247w\352\274n\314\'\275\372\265\027=\253\303\200\274\315i\263;$\366\366\274\204v\346<\312\022\202=OK\260<\350\240\216\274\354\313\247;\370\364\254<\340S\352<\t;\360\274\000\365\244\273\215\255\023=Mt\254\274\207\002\013=\345\"\014=#=\210\2758\3666\2755\211\253\272-\3271\274\246%p\272\356\356\017=\262\256w=\314eG;$\316\372\272!`\324<\344\265\316\273n\246\375;\216Z\371;\220A\001\274x\265\251<\364\3152=\314s@\274$\337/=\304u\031\274\331\3746=\317\031 ==\362\226\274l\221\001=\236\332\252\275,3\220\275v\342;\274\371\023\323<\024\331\212\273&\2314\273\030Z\r=\\\210D\275\267\224\271<\032*7\273\266\263\336<\221\324\014=\337x\335<\377h\214\273\355\354x<\323\003\356\274\035\206\210\274s@\202\275<\212\t\274\201\001\026=-8\326\274\247\332\t\275\025\003\\<\240\305\014\275\321\375\021\275\252\010\271\273\013\216\235\274\304\214*\274\215\006\201<\\o\350<\270T\215\274\272\225\002\275\245\344j\275f#\224\215\243<\225\214\304\273\222\343\034=\234+\201<\331Q\024=\212\305\253\300<\201\220\021=\034\3166=>\262@<\374\355\232:\017K\004\274B\317\264\274\001\265\035\313<\344\272\025\274\335\256\313<\021\376\241\274w\221i\274W\226\215=hi\001=\255&\025\275Ws&=\034*\032\275)\344\310=\263;_\275\240N\316\274;\213\246J\030=852=\232h\366;uN\034\275z\341\321\274\037\317\271<\303\241\037=\337|\343\273/\375\351<\000!\340\273\370\325\225<\254\211\003=\332\221\004=\372\r\033\271+\261\243\273\317cC\274\256\301k<\037\027\007\275\324\327\037q\255\274-~\t=\034\256\352;\376i\367;r[Z9J\203\233\274\347\305b\274P\204\214\275\n\216\206\274\311\277q\274\307]\373<\377\230\211\275\322)\236=\036\313\010\275\010\213\255\274\000^\345\242<\037W*\274\016\252\023=8N\326\273\202E\'\274}\030g\275\324\327\032<\371;\272;7\014:a{<\032\002\223=\003\023k=\035\271\337<\214\026\020\275\343\304w<\244\310\260\274\035)\016\275\034p\007\273b\t\270\273\351,\324\2
73mh$\360\274Z\345\353\273Oa\200;\376z0=\274\035\026=\265g\377;\211\262\213\274\224\013\334;\376\240-\2755=L\275\237\301\206=\255\352!\274F\242S\274\365\004\257\274\351\324\360;\220\350\205\274\311\025-<\264\316$\274<\255\315\274\253\353\256\274j\246\323;\036\275(\257\372<\0165\002<\360;\344<\243%H;\007\3759\274\236^|\275\206C\342\272\247\344J\275\202q-\275,>\227\274\376\2107\273\332\207\034\275] \004=&1W\215\242\274\363\364t\274^\270\276\2749\2400;\352\334\372\"\236\274$m\241<9Y\037\275\rzh\273\326\223\027<\333r\345\274y\2078=\024\253}\274\006\345\275\274\211xA=\372\265\263\274C\350\003\275\360\362:<\250O?\273\023sl=\315\233\027\275\034\215\177<\220\207w\274\345\370\300\273\242\025|<%|-\274\241\315o7\336t\244:A}\377;Zg0\275Qi\243\275\315\207\204\275\216\220\276\274\347\312\340\273\367\237\304 \323<$A\250<\375\244\245<\274\313\323;\005X\010<\317\300\250\274\221\211\001;\033,/\217\017=<\241\270<9\233\350;\002Z\330\274T~T=3\025\204\274I\312!\274\224\343\t\275\014\231~\274\'\016\216\274\224\250\027<]\341\330<\335qs=\310\200\010\332\273U\312u<\364%\224<\203\377q\273\367\031\002\275\345\231\201\274r\320M\275\321T\356<\351\275\200\273\254+\002\275\333\272^<\251e\003\273\010\216\216\275\336b\261\273\353\302\243\273\357\272\206\274\245;K\274\310\374.\274j\306\205<\'\213\352;(\340\316;\205\305\226<\214\355S=\347\031\322:%\324\265Qu\013\275\025E\013=\266\220\316\274\364\037\364;\003\343\304<\036\316\216\274\244a\031\275Q\036\306\274i\000\206\274oB\364\274[\004\316\273\013\207`;2\243\244<\364\335$\272\200,\226<:\320\347\274\327b\022\274\354\216O\275\032\205\021=\346w#\274\276\007\313<\320\024\360;Z\271\230\274M7\343<9\241\220;8F\362;,x\257<2\241\235\273qI\276\274\031\313\232;\360&\265<^;\030\275ky\367;K\371\004=<\302\013\274&?\252\274p\200K=nT\302\274\266j\303\273$\\D<2\302\010\275\220\237\025=\256\t\025=\207\360\021\275\364~\367<\320s\263:\306go<\212\325\212;A\324N\275X+p=\270{\325t;\310\001\246\274\215U0\275\332\"\220\273\207\026\263\274\031c\312\274,\323N\273\370\033\010<\3
12c\212\274F\r\330\271B\323\225\272\2154\221<(\367\203\273s\200X<\275HX\275\345\257{\274\231\274\341;Jj;=\245G6=#S\233\2730\231\272\275\207\240\t\274\033\263\371\273:G\210\275\352\310c\275i\245\023=\204\250|\274\244\233-\275\230\313\376\274TZ\227<\207\343\330\274\004\350\205\274\334^\222;@\020\254D\233<\351\226\332\274\334\204.\275\275i\335<\321\315\230;bt\263<\216\036@\274\021\327U\272[TB=+%\002\273x\336\312\273S\217,=s\311*\274\017\214\'\275\322\330=\275@\221-=\364\200\261r\014\347\274\330\215\212\275\225|<\274^\320\336\274\353\303\034=<4T\275\344\305x\275\310\336;<\353\231\306;\301\314\026<\201\246\206\274\250\020d\351\311\274I\346s\275\013K&=N\003w\275\314]r=+\331\021\275\2234\247<_\363\300\274\227\322\264\251\006=|\322E\274\276\353\214:#\376\366:\206\266\265\274V\235U\274\221\304Q\275J\372\321\362=\335\213\314;\250\272\350\274\022\273!\275\207\t|\277\307<\264\261z\275\331{4=\3521\361\273\0011\\\274\002\227x<\256\337\201<\265\277\260\273@\376\033=\377*\267=\251\262}\275>/\236\274\267\024\017\274>\261\2319G0~<\343e/\275\237=\024=\342\020\034\275\222G\"\275\003\375\302\273>#\363\274\035p\033=9\t\376:\344\312\014\275?^\327<[uI\274\273\006\204<\237\371\337<*\310\257\273\350\320\222<%J\337:H\240\030\274\257\213C<|PF=\352\002m;\332\364\022\275H\232\t;m\313L=\017\202>\275XA\244;df\226\273\021\346\223\274\017\024\031\274)\226\013\275\277\241E\274\260\031\371\272\331*\312;\324]\036=\024\316O<\177j\007\275\227\301(=? 
\367\274\tb2\275P\327B\'\272\301\341h<\310g\207\275x\030\367\273Z\312F\273\356g\022\275w>\377<\210Q%\274-){<\217\370G\274\357\002\364\273V\225|\274\346\010\345\274j58=\032\017P\274\352\207*=\301\254T\274\2632\272\201<\020\361\320\273\301b;<\311\261\024\273\031\014/=\033\243\307\274\252\245\232<\345\362\264\273\030y\377\273\204MZ\2734\267A\274\243\344\303<\256\342\270\275\252\323/=[\023\006<\256G\001\274\017\353 \275\250L#\275\210T\276=\256\000j\274\322\002G\274\246\350l=\346E-\274\226\306\305<\025S\250=\3355\273\274\024\325\215\275Q\320`<\264\335:\274U\203i\275\370\243\217<*\377\177\274\004\376\356<\\\223L=\220\021\005\275|\202\366;\211%\003\275Q0W=\347\340&\274\331\231<\275H\2068=\270E\'<\277\362M\274N\354(\274\312Az9\003k\254;\\\036\276\273hT\035\275h\300\304\273\251\366|\274\357i\021=\004\327\370<\316\246=\r\323\273c@C<\240\264\246\273Q\375N;{_V\274\374\267\261m<\322\373\346< \346\016\274\tb,\025\213\275=\275-= \t/\274NY\000\274%\220\004=\307\372\351<\237\301\376\274n\255\034\274\206.\020=a\270}<._\211\274L\215\232\275\302B\272\275Ny\307\274\324\3175<\322P\331\275N\243m\357\265\323\274\270\273\312\274\277\223]\274\031\305\032\276\022s\203=\205v\006\275.i{\275\200q\232;V\031\215<\375\361w9H\214\301\274\354=\"=|\355\312<[}\231:Xz\254\274Q\226\003=\266\321\341<\031\235\017=\321\217/\274\366# \210H\275\274\356\334\\=\'\022\005\275{\233\020<.\023\347<\246\035\275<\033\223\224<\024 
\254\273\326\353-\274\232\'n;IH\274\274\267\000\035\273T\3470\273\310\001,\275r\232w\274\207Z\205\274\002\337\022=\310\355-\274\354a\325\034\361N=\202+\017;\326\210\344<\377dw\27533\'=\324\324\266\275\224F7\2747\3532=s=H=\367\216\026\276\235\016+=\024\321S\273\206\315\245=H\374w\275\316\307\254L=\266\220\023=\240\025\017\275\"C\265<\035\356\334<\352@s=g\t_\275Me\021\275\202\202\001=y\225\267\275\353\340<>1\320\246<\'\242\"<\226\254\032\275\324n\026=\253O\224<\352\033\014\274\261\377\314\273\350\273,\274<\004&=\341\373c=\257\246\022\275\2569;<\250#\021\273\321y\215\274\346\r0\273G\021\202\3639\210\275\034\r1\275 ::<\302\275\234\274\2103\225=\276 3\275\227VU\272\030j9\274\326#c\275\266\177\235\271\314.]\273\310\374\023\274\352\251`\274\216\274\220\2752\266\373=x\033\310?\353<3\377\356\274\252p\246=\325g<\274\007W\227\273`\336L\273\207\022\204;\177N\254;P\312\302<6\367g<\344\262\202\274\354\224X<\252\315\363<\343v\027\275\223T`<\006E\234\273\2659n\275\364\006\004=\016\275s\275;\315?<+p2\275\275\331\375<\363\252\023\274\364\222\305\274D\237C\274\312\344W<\r\226\200\274\001\366}<\214\003\266\272\032\303\255\273\226\274\245;EX\225=\201\266\223=/`+\273\273\216\301\274z\263\361<\323\264\210\274\314p\204:.\n\273\274`\0070=\220\017\213\274/s)<\220\327\242<\262\313H<\2347\355\274n{Y\244\215\273\036W/\27524\020\275\243\234\253<>\314-\275?2\022\274$\333w=\030e\260\275>s\004\275\326\304.<\372\362q\275\344\334\354<\375ei\274\351\312p=\365\303\215\2746\337\221<\217#S\275(\023\301=\326\225\n\274\222\264\037\275w\314\330\273T\005\221=\270m.=N\201\243\275Z\360\277\274\251\200\343<\340\273\355\272\223\241&\275K\027\240\274\204\0345\274\264\t\000\275\035\014\214<\302\3103=\373\321\204=\216\237\021\275#8\317\274\200\022\254=\002\327[=R\303\034\274\3632\320=YvH=M\r\177\274TQ\265;85o\2732\305\215<0p\202\275\241B\204\275 \343\365<\250&\230<\372(\014\276{\301\234=\331l\004<\267H8=ywp<4\231\354<\030f\022\275\237t\263=b\362\301<_\364L=\254C\344<\027X\366<3!\276\273\023l 
\274\002\325m\275\034}\035=\233\024\226\274\304!\231\275\357\006\230<\037\275\024\275lP3\336I\020\275&&b=\234\301\317<\227\213@\275c\016x\273\201\036n\274\023\023e\275\232\016q=\r\216F\275E\021\305<\'\026E\274G\216\231\224~S\275K\326`\275\'\333\201=\t\230\266\274\255\035\027\275\346\300(=\013\307\025\275)\021\332;\037\003\326=7@r\274\000\203d\273\247e\237\275\301\216\301<\270\\\024==\226%=NC\314=\031\020\227\275\212P\225\307\n\240=\023Z\014\275\354|i=\374\307X\275\222\257B=\253e\240\275\340\335\275\274;\363\032=::\241\275\237{n\2740\202\300\274o\034(>\237\336\005\275)\032\261\276Z\267<\372\265\373\274\232c\000\275\352\345\257\274\032\'\024=\014?\177\274\376\031\337\274\202,C\274\260\240\333<\344\216\345\271\251\326I\275\203\3020;\265\212\350<\257\310\034\275x\332\377<2\334\200<@\324S\274\250\200\235<\241\215\316;\376\255\003<\255\331\315;@\247\243\272^Z\001=\334\207\"<\213\372\243\274\005_\212?=BJ4\274=@U=e\n\014\275\371\027\367\272\262\373\377\274t\245\204<\361\021\354<\320/\247<\366\215f<\\1\326\274H\335?<\036\252\031=NsQ\274\254\243F\275<\241,\275\005}\016\275\3753\240\274\033H\r\272\242\231\221<\306\301\306\3226=w\377\236=6\377\311;\215\035$=3\205\263=\007\225\006>/\373\017=\240P\020=\246\235\030=\322\311S\275\270.\244\2747$\273\275\274&\371;\323Ji=\2648T\275\344\t\335\274E\314\225=H\324\016k\275\302<:\255\247<_\340\306\274\2028\336<\356\223\200d<\204\035 
=\\\037\325\274\rE\362\273\312\026\003;\251\025\306;\225(i\273T\022O=T\021\272=\254~V\275>!\r\275\304\344A=;\375]:\201y\306<7\032\276\274E\014Y\274\356\0234=\013\206\214\275\0077f\271m\036\257:f\t\214\275a:\360\2733\3717\275\362\035\216\272\362Ls\275\024\311K:\3020\203;\33556\274y\271n\27455\210=\017|\001\273\333\326\263=\314\373\024=\232\216\201:~\372\303:\333eL<\217\237\255;\232\004\224\274\240K\226<\356\356\346\273\006\'r\274)\010\304:e\321\3548\220\214x\273\273J\034<\203\216\200;\370\271~=$\341\325<\340:\325\274\260\t\203\274\275Q\307\273\210\237\215=\271$\325\274\367\002\005=\016\267\327<\032Rl\275\240o\t;j\311\017\275f{\322<-p\276\273\363U\225\274*\365\010=\252\343O\274\316\036\370\272\214\330`\274\301\226q\274\237%.\273\344\370\276<\314\337\346\273n\001\222\274\252\362\221:J\353$\273\030o\000==\\ <\014G\014<\212UN\274\244>\257\274K^u<-\026\007\275\243\323\241\274n?\307;\307\316\356;\332\330B<\344\036\214=\206\352\025\274\364\302\177\275d:,<\354\215\214;2\244+<\333v\377:)\363+\275\215#\206\273\017\312P=\2653\020\274\222\201\213\274k\243P\2756[\027\274\200\254\377;HYD\275\214y\237<\327]\214\274\302\3032\274\237Z\215\274\251\016\241:W}D;`\274\257\274\364\326\310:;,\361<\037\360\017\275\204\020\r=;\022\"<\212\226\010=]b\024\275\347\217\005<\344\365\215;!\347\207\274\013\037R=>\030l<\357l^<\321xL<\022[\271;/\346\032\275\006W\035\273\263k\010\275\205\323\241;\026\212\003=7 
c\274c\334/\342=4\274\035=\r\025\311\274u\016\271<0\316z;\251\273\001<\305\270\352\274\202Z^\275K\313\204<\316\277\207<\177\253\010\276\354\307\253=\257\365\n\2739Q\t=\343]\251<\272\357\004<\254\021\271\274c\210j=\000\350B;\226r\213=\271\236o=\020o\271P\274+=\226\376*\275\2009\317\234\027\t\275/\031\254<\276\242\003>\230\365F=\263=\033\275\350\234t\274X\316\222\275\007\200%=\211I7\275\262\243\354\274\010\353\314=+\3250\275\203\016\"\274^t\343=\030\001\252\213\037=\221\275\014\275\274\367d\273\214\346\3131}=P\323\306<\316\231<=\341\t\231=y\367\006=\243\026\037/\270\274\334\032G<\271\246^\274\377w4\274R\235\r<}u\235\272z\272\332\356\200\274\2174\327\274?\222\252<\322L\226\274\255\245\357<\354\370\001==\314\221<\210d\221\273\362\335\301:\275\"D\275\021\274K=\320`\254\273\206\034\206\274\322\227\367\274\363\304\372;\003\270S=\n\312\330\273\023\212\201\274\376\252`=0\373\026=%\215Y]\274\007@\205\274t|(<\210\336\315;\361\345g\275\220XH\274\355%2<\217\312b=\025\266\267\274\345\323\211\275\313\231\033:\327 \004:\376C\204<\204\253\273\274\017\245\326\274\362\2053\272d\013\273\2737\337\234\274\320L\356\274\266\245\n\273\034\307\354\274O.\333;M\344\224\272\316\'\313J=\224 
x\275\232\205J=\262\222\274=\343\257\317\274\255\222\\=\024)\345;&\037\021>\t\332i\275\214?N\274Tq\020\275\361\316^\274\216\177\265\274g\226\023<\222\353\336\274\tg\260<\307\240\305\251\275\215v\010\275\322\032T\275\037\371\334<\376&\341<\313\241\225\274GwP\274\276\361\205\274%&\303\275\302\373R=\344\177\241\274tj\025=K\031H<\013-\261\274\204\324\250\274+\037m<\307\264\276\274\031\"\216:\271\244\002>\016\344\r<\323<\234;\266\r\247;\260\177V\275\237\032\252=\320w\004\275Z8s\273J\203\263=\001\377+;\037\352!\275z\312#=O\005P;\264\250\262\272\353\3334\274\361{\350:)\241\026\274\270\'\322\274\022\366\244<\242h\031\272\020\000\337\367\316\274tJU\2759\372\\\274\327\311\024\275\233\364T\275\226\005\017\274\027\330\356\274$*\226<\210D\256:\315\331\303<\276\225?\273\314[w\274KS\365\274h\037\004=\313\202\323\273\300J\006\274\233\"\304<\363{\302\274r\312\266<\277\252\306\274\345I\2139\027g\246;\036\224\006=Y\347\203\274Nc\233<\260\246\235<\314\007\001\275{5\305;v}e;n\307K\274N\nr\274\334e\352<\233\224(\2751y\345<\224m\312<\252\004L\273\203E\203<\247\371\303=\273\302\034=In\001=]\335;\274q\347\274<\3262\315\274\272q\374\274\270\014n=n\211*\274\\\234\316<\032m\270\273\220\005\261<\365%5\275\005u\3007\303\345(\275\245\037\035=\322\241\202\274\374\234\255\224=\360z\036<\246\321\304;\35098=\036\233\241\273\275\024$\274\313\3245=\001\264#\2758-\377\273\014\223\244\274{\247\313l\253<\025X\355;O\304\241\274\210\\\000=\221/\200\275\225\352h<\336\375\252O\272Q]\233\273\232^==.\021\316\274\'\246R\275;\365O8\325q\030\274\325\rN=\366\324\235<0\273\327;\001\332\232\274\2372\007=\373\010\005\275U\007`\274\344\307P<\026@\243;VG;\273\240\\\024\275\037\262&\275\032\006\246\273LH\350<\363\374(\275\317E3\275K\035\264\274\350p\2369=JX\027\274\233}\211<\252\'A\275\003\337\034=\027\222\220\274\310\307\375<\356\371\001=\311\242\031\275\027*)\275A\373\332\272c\203b=e\315;\275\001 u;\254-0\275B\020\210\274\317>\250;\232\025z\274\266Q\312<\242\275\310;\\\310=;\034\220*= 
7L<\230a\007=i)\310\274P\267m\275\212\275\306\273e\234\336\274s\327\253<\304\300\001=\307`?\273\357\364\234;S\207\311\273\377z\304;\261g8<\302\264N:\236\303\352\274\371v\311\274x\271\200<\254\303\003=n~5\275\213R\007\274\347\223u<\351\241(\273\255\241\201\230\274\271\326\220<\362\016\320\274x8M\275YDj\274\n\205>\274\020#w=;\271\004\275\241\216c\273Y\016\001;M\034\221<\312\220\357\274\362B\036\275\216\016\276\274\234\n\024\274e4\205<\226D3=\365\336\t\275\3030\037\2758wa\274\346RR=\215W\324<\333\022=\275?\324\003\275[f\353\274j\244\027\273\205a\333\274\2130\027<\220@\001\274\323\036I\275\267!\254\274\210\'\233=+\001\t;T\334$=1\266\222\274(\022\265;\271\375\365\273\t\274\301\274\345v\333;u\220s=8L\005=\351\2567\275\362\334\322;=\350[=X\333\374<\257\305`\275\313\242e\274[U \274Q\236\301\274\031\364\246\275U\243A=~D\003\274\375\030\225\274\031q/;\017[W=\3341\227<\361s\234\274\263&:\274\324\364?=\267AX=\370Q\t=SKp<\270\216J9\2310\372<\'\312\251\275T\306\247\274}\013\025\275\244\237\277\274j\354v\275\345{\336;\267\356\003\275{:\035=E{\004\275EK\007\2734\367\213\274xi\351<-\204\267<\360+L\274\223\256\264:Or\307<\177L\347\271A\0058\273\275\265\316<{=Z;\3578\260;,H(<\222\365r=T\357z\275\357\244:<\343\325b<9\310`\274\006;F\274\256\210\201\274C9\232<]\003\027=\310\036\204\2759\026\003\275\304@\372;\237\226\020\275\345\236<<\357[\277\2748\362;\022\234\275$r\r<\020f,\274\321\260\346\273\266\302)\274\327\351\236;)sx\274\022\0317\274\177\361\'\274\356\250_=\240\002A<\276\036\005\273\230 \327\2742\033Q\274\305\0312=I\262\031\274n\233-=} &<\351U\324\274\207\251r<\254m\021\275\202\235\237;z\276\216:\371\323\371\274\3027\240;!\316c\273\213\271\204<\247|/=\340\217\340\272\232\233\n\274\236\322 
\275\221c}\273\235\020\300<\260^\262\2746\312\010=_\355\201<\254\257\031\275\300\237\315;\203S\242\274g\304=<\352\352\022=\304&\010\275\323\020:=\211<\340\274\223\004K\275J\006\334<&?9;\345\203\311<\333\227\004\275\037b\235\273\263\237\340<\244\331\323<2\327y<\032R\006=W\236\357:\357\277\252=\356l\020\275\273\247\300\274\030z\032\275\006\024#\274s\377j\2745\313\024<\374\247\263<94\322\273\222\261\312\273\211\207\313<\201=!\275J\"\276<|\014\023<\0206\013:\241\233n:+;z\274\355\330g\307\274W\200\217=<\376<\032\275\227\227<=\361\342.\274\256e\247\274\004\216\276\274\010\236\276\274\360\341\201=\255\361l\274\273\026)\275\333\371\253\274\270\366\251\351!=\230\225$\274\335\267\335<\337\215\245\274\200\3237\275\005k]\274\227\326\n\2749\263\252=\"\253\257\275\303\372\267;\355R\364;3\206\037=\210\007\265\274\234~\273\274\263\324\244\2740+*=s&\233;\034\341i=\315\223(\274i\332\033\275\244\005\252\2747\206\255<:\2371\274t\360\360\274f)\343\274\026q8\275\377\314\032<\326Af\275v\377\234;\247\277\275<\354C?\275\037\335\317\273(oX=\223\362\352\274\244T==K\345\034\274\0256\007=\256\207\0049\300B\005<\031\016\227<2\305\261=:\213\005\272\255F}\275\3613.\274\251\347\032=\033\022\237<\340Nl\275\260\255\373;#\272\001=F\326\253\274\212\235\235\275]\373\t=\2007\025\274\254\232\322\274#\361\351;\241\354F=\216/G=\3024\357\274\353S\274\360q\031\275\353{\206=%^\201=\223y\216=\226<\251\274{!\325\272O\257.\276\370P\303<\204{q\273\014f\200\274\326\354\337;\373\017\272<\255\247\231\274t\373\177\274\217\223\373\325\305\337\274\273:\214=-\017Q\272\370a2\275\010\313\26699o\026\275\177H\224;h\\\212=+\326\033\275\220f[=hQ\331;\221\232U=\206\231.\2751\324\334<\177\346\265\274H\340\204\274\020Fa\274\331az:\213\\\"=\350\3426\275\314\217n=\356\364\000\274K:\035\275l\231\373\273\234)\320<\2458B\275\331\255\270\274\206\310=\275\242\377\233\274\310\007\236\274\003\357\203=\341\270\200={=1\274\230]g\275\257\014\014>Y\211\014\275F\004k\275\0231\016=\232\373\025\274Wj\254<}\273;=\322\340\"\274`\211\276\273\320\32
7\212\275\005\210\210=\033m~\274\372k\016=`g\346=\215\n.\275\260\0216=\277\247\355\036\023<\177\006\027=n1\317\2734\243\036<\037\366W;$\275\357<\022\317\236;A\300\302<\233\251\022\275\3414P\274\364\346\367\274\327\225\017>o\022\r<\224\\*;\022\313\"=\344l\206\27468\277<\250\213\220<\251\351\225\274=\376\001\275m`\024=\227n\231;3hg=\363\242{\2756\271\357\274aN\220\275\000\2363;\321^R\274 \004G<\263\024\200\275\300\366w\275\310\316\t\273vf\n=)\357\207\274\362c\312\273\034\244%=\302\333y\273\260S\361\031=\373\357\244\273\250\000\335<\353\002\251<\027uE<\300)\233\274\022\356\277\274\276q0\275\0130\304\274B@\316\2734.\310\273}r\346<\306&\217<\310\252\251\274,\320E\274\311\031\271\274I_\337<\034j\2779\345t\202\274\241>4\275\036\230!\275!Ag\2747^t\274\364\264\257\274\327\033p<\2427*<\'[%\275(\221+\274\314\2319\275 \3762\275\346\257}\274\211\r\013=\234G\"=\277-\200<$\245\255\273\022R\026u\247?<`\274U;\301\225\274\004W)\275\204]\244\275\035\262\237=\3124\211<\215\036\370<\211\211\205\274LS5=y\031\021\275d7\323=\30401=\217\251\265\275\270\256H==\312\006=\2451\035:2\266=T\352\034\275\2237x\275\335>1\274X\213\000>;\206q=\353\251\215=M!~\274<\2104=#$k\275K!\326\2756\231\221=\264\313\337\272d\270\'=\272\3674=\355+w\274\220\222\222=}\215e<=Q\261:a]\341\27363\016\2759q\267:\222\253\335\274C.\203\273\270\214\203<\322$\004>\330\223\226\275\307E\211\275]\255J\274K\207*\274\202|\220=\370$\346\274\373\244\350\273!n\007\2753\033_\274%\366\252\273\277\371\361<\205\004K\275\335T\245<2\270\257\275m=\357<\207\233\312\275\244<\'=2\335B=]\243\206;m\\\271<\034\360+=\350\341\200<\262?_=\221+\204=\371\351\234;\010g\345\274\326i\013\272\031H5<)\033U\275_\376\234<\373Q\226\274s\353\271\272\343\201\274\272\206\320\036<\363\346\005\274\3117\'\273%~\223\274\027/\233=[Tp<\374\346\261\274d\200\001\275BN\236\274C\031\214=\357^\333\274{\231j=\354\322\032<\326\214\033\275\241\000\316<>~\327\274\272;\303\274\357\371\026<\261\024\223\274\247\035\203\272s\353\t<\300H\273\274\340\330Y=P\357\254<\362 
\370:8\2015\274\223\335)\275d\232;\274M;\252<\324\360\016\274\017mA\274e6\022=8\2030\275\315\355e=\275\313u\274\340|\017\275\031\271\370\272\007\220a<\236\325\374\273\224\023\3679\035\276A<`\266\225;\314[\244\273\352\367\3248\246]\275<\217.\024;\0224&:\255\366\022;z6\017\275\243S\021\275\270\353@=\322\305<=\202c\305\273\367ZC\275\215\2140=\016\370\007=\251\365\004\275J|\327<\3564.\274\342W\003=\"\375\311\273\226H\252<\334{\342\364\315]\273\341\234\240\274O\300|<\237i\033\275\241op\275\225*\353\2746\217\210\274757=5\340)\275\361\261\027\275\323\335\003>\316\322Z\275V\204J\2758\337t\275\274\360c=\343\024\032\274\322\026\177;@o=\262\274\237\255\357\274W\262\271\274\227\331\253=\240\006\314\273\335\253\206=\224\327\204<\235\027\343\2744\213\004\275{\273\n\274\323\371\022\275\020\020\363:\345\376\376\274\036w`\275\364\220F<\2211\216\275u\260\240\273\3131\364<\225\211^\275\236o\367\273|\343:<\370\307\202\2758[!=\264\264w\274\232\t!=\301\014Y\274\250A\013=\'M\204:\212\"\330=u\213\303\274I\0175\275\245\216\250\274\230\016\016=\363\252v<\376\343(\275\'\221u;V\334\316<\355Wp\274\220^\010\275\316\321g<\241/\237\274\272\341+\274,\234\211;N\314\036=\260\304^=\322\003\327\274\357K\000\2737\266!=\266\207\251=\220\000\'<\375\"\274=J\033\220;\226\031-\274\325)\276\274\265\276\204:\352\206\241\274\240\003\031\275\243\003^\275\363\241\274;; \361\2740^\317\275\307\347\213<\332M\356;\257\336S<\206\330<\272\234{z\275\002\272\310\274\263\236\306=\376\310\214=\213\203R=\251 <\371Ou=\330\276\032\273\004\235w\274\327\002\237\275Eq*=z\2103\274|\316\216\275\337\370\313\274\341\037\263\275$\270r=Cw\213=5\1778k\002\220\275\204\013M=\026+\253\274\035\361W=s\264\211<\034#\031<\3610\230\275\310\217\306\274A\337\277;f\222\005\275\216\323\205\274kq\204;\316\372Y\274|\323 
\274fW]=8t\000=\002\261\205\274\010f\266<\235m2<\375,\t\274x\346\265<\321\355F<,E\n<\341\304a\275Ss\213<\240\2361\275\"\222\026<\001\320\224;_34<\322\336\205;\323\034\327<\327n\345\274\000\255:<\312\244\357\273\210+\200<\373\235\221<\377\202\305<\373\t/\273q\221\264:\0108\324<6\200\233\275\035\334;\274\225)\363<\223\220\256\255/\003\275\231\320\224=\317\376\204<]\024;\275\227t\254\274\227+\025\275(\333\002\2752\255V=\333`\006\275\330\333\341<_p\237\274\02290=\300\272\013\274P_~<\014\274\224\274\355\330\255;\354S\246\273MV\230\272\030\217|=\372[\202\275\261z&=\232\217\230\274xy\252\274\224\026=\273\027Y\314\274\265u\344<\310\242\252\273\201\306_<\212\263\000\275\324.W;\004k.\275}\364\021>5\'\337\274\345x\310\274\007\305+=\223\224\033\2751\211\365;JJ\235=\323mK\274\\x\371\273\276a\222\275\250g\327<1\t1\275hy<=\276Z\312=qY@\275\232\001\201=\334\"\206<\032\200\334\274!\253C\371;\324E\343<\351k\276<\353\302\223<\3408\2679\037o\201=.\2146<\013\335\374\2744\253\274<\221N,\273\223\2058\274\233\016\002\274\3533\260\274\376=%<\t\315b\275\345\361\357\274\221\035 \275\257\364\007<\251\001k\273\243\303w\274\3463(=\253\r\257<\235\375\341;\324lr\274\315\304\366;\324\300\005;\303\341m:A\324(>\367\342\236=\255\211\361\274\275\230~<\t\261\273\274\264V\311\274*\037\237\275\r\247\372\274\3767\262<\340_\233;U\2232\276(\215T;\275\353v\2744~\245=\256p\177\275\r\373d\275\321\311\306;Z\321\">})V\274Y\032^\274\322\215\030=\360\230\265=it\234\2758\373%\274>\365\022\276-@.=jD\364\274\025So\275\275R\231\275\310\276\202\275\317@\'= 
h\022>$Y\r=<[\001\275L\351=\275\034&\003\272I\026]=\237\"D\273\372\341\271\274\360&K\274\324\330\314\2523\263\274a\205\201\274\256\362:\275\261\234\374<\2140\273=\346\035\230<`A\345<\r\234_\274\370b\025>\334\007\223<\0204\344<\227\217\036=S\3617\275\311?\325\275\002\275!\275\362\016\246\274\013X\243=\177\264\257\275\341\361\t\275\321\027\273=P\222\010:C\265\007\275\021e\365\275\035\033\335=4\342\327\272\226\262\304\274\177\373\004\275{\367\006=>\257v=\374\337-\275D\213r\274sT\370<\0317\304\274\364\324I=5?\244=T\271\204\275m\035\342<\231\220\n<&j\243\274lbZ\273\017\'b\274\304\372\n\276\007\267\316\274\233\275\022\273\007\206\035\275\342\233W\274i\t\241Z]\257=\032[\002\274\016\024~\275\316\222\216\274\243\221\217=\021[\001=\210iJ=`\t\005=&\360\201=\000T\335\275b\273\023=K\335\215\275\206\376\343\274\347\354e\2744:K\275\207\262\036\275\264W\202\275\014\214\321\274i\237\304\272%(f<\206\355\024=\334a\003\276E\307\325<\340K\311=E\257\210<@\340U\275ZtX=\272\345\004=\364\202/=\216\210\367<\240V\260\274iPW\272\311\3336\275\202\270\315:\211\336\002\275\311ZL\274\001;&=\252\372\222\275\227\000\332=\257\326\314:\315\341\022;\213\016\344\274\365I\263<-~\326\332\030=(;\371\273\320\246$\275T\361e=\271\201\204=\216f\014<\347\274\313\274\320\365\205\274\225ei\274\205\214\017\275\241IV\271\017&\336<\333G\266=\211[}\275a\347H\275\244!\254<\252\342;;\305n<=\002\007\225\274\262\302\346\274\232y\241<\226\201\001\275\215\303\3319\226\"\360;\235\\\003\276\342\017\374<\242\377f\275i\270<\272\275\210\231\275\272~f\274\351\356J=\224\306\354;\206\200O;\313\364\034=\264w\333;:N\242=l\221n=\233\315\031\273*.\003\274\217\330\010<\\v\3629\"\351\313\274P+\263\021\262\274\377\005\271<\022%\t<\014W\320\274O~@=_1\257<\034F\014=*\363\335;H\224\207\274\370?\022<\002W\302<5sV\275\334\373@=\021\203\251\274`J.\274\'\340P\274\212\313\302\274]\340,<\034\034\343\272\n\356\225;t\220X=\306H\236<\027\023\273=8\361H\275\335\244\365\274\020\364\273\274\t\233\216\274|\376\021\274\000\375\037=\232\324v<\212x\364<\21
2~6<\205\342\021=\022/\264\275A\036Y=fN\352\273K\001\325;\330\353\274:A+\226\273\002\312\314\275[x]<\354\334 =\363V\r\274\352\231\266;\360$\251\275hP\244<\343@\234=#\010\221:\370L\016=\212\027\031=R\360\232\273_\356z\275\245\205T=\302}\264\274T\341\001=\rH\032<6\310\224\274\326\366\317\274\263t8=8j\216<1|\027:c\361\313\274e\215~=O\270\250;F\235d\274\362\340\320\274M\037\313\274\002;\357\274=A\356<\2326\231\274O~\234\274\'hw=p\341\004\275\313r\203=:r<;\356\367\264\274\257\0174<\014p\226;8E\200\274&I\311;\242(\241;a\361|\274I\301*\274 $\236\274B\214\302<\035O\'<\360y\242\272\336\325\013\275\271\237\222\273\177\372\021.\323;\263\322\262\274\366\344\222<:\335\270<\310\251\034=\n(\004\275\274M\211<\350E\237\274\014\3157=\303\301;;\323\336I=\301\206c=\275\315,=\ny\022=\nj\253<\200\247\336\274\254`\203\275*\317\222=5\364a\274(\253>\275\000\344\224\273\311F#\275K\323\027=\207Kr=Gt\n=\313p\020<\267\324\014\275S\256=<\002<\203=k\277@\274\23239\273\362%\363\274\210\035\304\274q\022\253\274\026\007\t;\307\010)\276\233\340\207\274\3060&<\016\t\256\275q\342\312\274\226\2535;G\320$=me\212\274B\307;\275\033\250\243<|g\301=\262\355\266\275\030\350\033=\336\260\346;^\367\034=\223\020R=^%\251<\241\321\341=\327\001\r\275\302\255Q\274\313\034\024\275%p\304\274e\261\366\272[\306\351\272\271\336\215\2720\326?=\233,\311q\275Y\322\014\275\320\375O\275e\245M==\346L=:\315J\275\363\372m=_\r\036:R\204\300\274|\234\236<<\261\r=O\235\356<\331p\022;\351d0=\214\tI<\271\216l=\305\230\216\274\273,x\275\314\023\375<\257\264\242\274\360\031!\274s1-\273\245\261f\273\256\360U;m\2128\275\341\323!<\272h\024\275\261\305\374\273\304\307G\207\001l<\370\374\273\274\342\263\333;u\033\263\274\302.k<\322\227\326\274\352\343\373\274In\014\273\020\323};\023x\014\276A\034\016=\010v\202\274\337du=w_\021\275\002\216I\2754\362\024\274E\216\t>\216\310\207;\213,\205\274\2219\334=\253\351\004=\225yG\275\277\250\243<]\013\n\276\237\206\266=@p 
\275[d\t\275O\337\234=\223\267\006\275}\r\326\273\201\262\377=!\177\n=1}\017\2758y\n\275\265{\305<\305\377\005=;\233\373\273\200\340\213\274\273O\337\273X(\275<\3203`\273\360f\004\275\223\217\234;\353\265\267<\000\006e\275(\3260<\006\265\235\274\340\357V\274\377\357\027=\352\257\226\273\301\332b\274\301.\033=x@E=F\214\245\274\243<\003\273\210.\032\275\241\344\n=[_\303<1\251?<\"\020\024=\374\031\316<\267<\337\273\246\212\237\274i,\256<\311\315\256\273tm\"=<\274\366\274\332g\201<\230\334Z\2755\217\327<\3641\306<\220d\354\274\034\366\316<\366.\037\275\252e\330\274\251\371\305;\367;H\"=i|\365:\330Q\376\2556\274>\274\262\273\345\221\243=\023\313(\274\342Hp\272\2701J\223?<\317S\267\274s\233\303\274\370\254\266<\256\006\271\273}\226\225\274p\323O<@\220\232;\211\320\006=\260S\252<\251-\231<\371\225\361\273\004z\261\215\022\220\205\204\275\301I\237\274\003\217\260\273\014\207Q\274t\025y\274\256V\371;\022\2458\274\177\224\204=S\222$\273X2\272\274O{\021=\232\332,=\263\342\377\2735\335\313\274\361r\361<\270\304\206D;\r\275\360\267\337;\214\205\247=\264\356\215<&\356\336\274\030r`=\006\224\250\333e=\343\254\364\274R\2511\275:\005T=v\005`\273\377u#\275.{\332;\320:4\275hv\367<\037\351;<\267y\364\274\234\302\203\274\351R\364:\361\363(<\363\r\177\273\035\355o\273t\277\037\275\327\311\213;\227\334!\273\034\t\3429\205\343\270<\306l\251\273V\351\004\275\213h\326\274\274\253(=,f\336\274\360\225\301\274\002\311\362<\356|\035<\270E\001;~.\214<\2603\301<\202w\232=?\336R\275\242\177V\274\032t\253<\3347\351\274M(\020<%\363\247\274\223&\247\274&\330\210\275\320gl=\n\0214\274\220\231 
\275\300\026\207<\0142\245<\352V6=}\005\311\273\'hQ\273S\036\010=\306\226\373\274\354\262h\275\2122\311\273\354}\310:pzB=Y\314\222\275\372\324%=F7\314<\317\272\241\275\377\207W\2742\377R\275\2002a\273\234.\r\274\202\257\303<7\005\242<&\366\202\275^r\207=\025\331E\275\312\225\264<^\026\354\274x\225P\273^\217\223\274\234\025j<\023\0144\274\376\\O\273z}\350\274\320\262P\275\001j#;L\025!\274\237\026\251\275\306B$<\201\023\362;\311\336\016\275XZ\306\272l\361\016<\036z\007=\320\013\010\275\303\216\263\274\357\374\265+\227<\200I\274<\335\360\227\274\324\311\033=Uj\232p\207Z=\205;\337\274\332\213\226\273\200J\361\274\'-\255=\221wi\274\354\353\250\274OG\004\273\270\013\271\274\304)\345\272\225\332|<\205\276\275\274V\024\221<\030\305\010=\350\376\023\275\016\227l=\241wv;\327V?<:6\312<\201\251 \275R\033O=\300-\225P\331\274$\325n<\013\020\233\274l\257\230\274\227\330S\274\202\016\'\274X\243\225\273?\1779=\036\031Z<\356\365\033= ZE<\207\360\005<\206\3758<\356\300\245<\275\261I<\244\270]<\366\350\'\275b\303T\274\326\3467=Mo\213<\3770y\274\227t\013=\312g5=\267c\322\272\237S\300\2746D\341<\203q\033<\343\230#<\021[\r\275 
\311\027=:\224\223<(T\026\274\343E\265<\260\3771\275r\346\234<\272\0239\273.\007\017\275j+\324\273\344\364\020=@4\"\273k\371\033\274\253\270=\275\350\202\244\200:\370\242\222\273\316\233\251\274\177U\030=\224o\337<\334KM\274\033\020%\275(*]=\220\334\202\274\240\323\010\2746\346J\274NB\235=\357^5\274w)\225=rd.=\304j\232=\232\323\376<\021\360\262\275xU\240;\350c\237=\375\270\276\274J\276i=G\220\366\272r8\201\270\275\330)\277\274\374M\250:iT\372\274=\003\310\274(\375\002\274N\223\272:\336\t\226\217\222\273\347I\240\274\315QC\274\212Il<\343\241]=\005\013|\274\214\222\'\274\366\203\251\274\277\313\237<\264\367\252\274\254yo\275\304c\036\275P\265W\274A\217\326\273O\312\021=j3\246\273\361{\022\272\034\340\014<\3370?\273\301\211f:P\322\027\275\310\324\206<\363\200\301\274D\332J=e,\223\274=\246\356\274\334\352/=\375B\305\274\363\233\025\275\222\360\227\274\362\235\347\274\307\236*\274Y\007V\275x\277p\274\020bt\274U\201\344:\272\323\323\274T1\223\273\325t%=E(\357\274t\250\214=\0342\022\275\"\312\\=ay\007\275r\022\007<\346;\235<1\276\270;\335\354|\274\210\016\'\274G\332-\275\357\211k\275\342{D=\332\177\211\274QgC:S\032B\274$u^<\r\357\260\2741mN\273;3\224\274Y\262\020<9-\007=\256\023\023\275%p\014\274\344\257v;:\322\357;C;\0149\374\376\350\2747\256g\274(\202J\275\317\003\023\300\274\215\264\256\274\337\202/\275/pM\275^\032\376;\031\356\214\275\330B:\275\311\343\t=\205\226\260\274W;\340<]\270\313\2740aF=\230\370\243\274\271C\270\033\275\353\334\353=\314d\211\274\026Q\352\274@\256\035;`t\\{\374<\247!\251<3\017\032\275\351\224\036\273\221\014R=2\317\037\275~\263\355\274G\205\367\273\332\210\230<\0067\352<`\035\217=\314\250\222\274\2710\023<\244\315(\275\351\030\214:\230W2\275\354\277\245=L\331\211\272\2132\363\274\016\035O\270\r\373\027\275\313YH=\355I\266=\256\377P\274\026\257t\275\360Z0%u;\226\\\226<\344_\276\274\336\021\245\274\244YT\274b\325\250<\270p\323\274e\326\n\275]\273\003<\345\026\016\361\256<\204\207i\274\361\337\370<4~\270\274\032v\n=\344\013\306<\321\262N=\013x1=\355
\027\357;zM\336\273\305\021E<\227\253w\275\324$\020\273\272\253#\275q\252[D\275)\377\201<\305\037Z<\033\215\332<%\374!\275\274B\205\274d\322\267<\324]\325<\275\222;=\316\351\n\275\264\nC=\360\203\235<\223\370X<\211^\367;R\260z\275!-\022<\3655/\275\241\370%=\342U\234\275\243P^=\'w\004=>\031(=d.\374;\367\032\200\274\365\263\232=O\3437={w\257\275\363\3239\274\231P\003<\017\203$\275\0327\230<{C\022<\365\374\023\276\006R\331<0j\253\274\214\236q\275\371\342C\275jSK\275\344\256P\275\245\303\002\274D\342\223\274\317\273\210=\306wF=\362\003\225\274\037b#\274\361\230\345<\372\367R\2733@\r\275{\355\331;\305\014\017\275\376\210\331<]\335\'\275\313\351\036\275@\324\376;\016J\332\275\236\356~\275\274\325.\274\232\353z\274!\036\347\274\251\377$\275.\210\002\275\273f\354\273\235\373\326\273-\342\303;D\315\223<\361\246\307\274(\335\2069\343x\227<\007\367\306<\352\237\032<\\K\211\274M\021h:&]\322<\307\263e;\247\n\352<\217\341/\275\237\027\310\274\370\237f;\347k\220=C\026\213<\217\301\246\313\000\274\255F\200\274\361\346\266;\353\305W\275\032\231\262\274\265y~=\333\350\327<\032\316\275\274\366\006\232\275\001\251\232=\235\200r\274\256\207\235\274Wi\315\274 
\227O<\341v\036<\251\335\212<\356N\004\274\261\271\023;\225\347\250\274]\332\236=\243\306+=8\007\312\275\331\035\274<\374\376\212<\237\235=\274\2207\250<\260\323\027\273ba\362\275gI\256<\005\312\026\275\335\263N\275W\204\247\273\367F\336<\254\2770=t6\246\273crV\275\220\226\317\274\326Mk=\010\001\234<\0022S\274\377\335Z\274Z\2255=-\031\021;\212\201\255<\245\247\346\275\315\245\231<\263\365\223<\005\362;\273\305\034\013=\r!\322<\024\r\345=\367\325\242=t\311.\273\251\232s\275\030.%\274j\270\357=\346\0263=c\024R=\273>\370;\027\363\367<\254\017@\275\241\034\324;\010\347\363\274\333s\227;B\316\336\271\022l\213\274\315\314\306;\327\264\271<\313\367Q;\342P\355<\013oG\274@<\024\275\000\212\014<{\267\375;R\236\026\274\251\273\"\275\314\321\007<\242s\235:\342.);:\376\026<.yR\274\241\310\024\274q\224\237\274\336!\007\274\3274\202\016\004\274}S\276\275j\305\275<\3029\242\274:\'\212=L\314[\275\314\226k\273\353\256\211=\361D)\274\300u\235\274V\323\366<\351\343l\273\306\211*\275\2234\274\272\300_+\274\352<\342\274\372u\013\275L2M=\217\206\223\274\333+C\273\3704F\274\317\247\205\275\341\212\346\273\205\236\326\274\270\360A:y\346\361\274\020\326\362\274\360\264\362=\017\210\263\274A\213+\275qMH\275o\237\365<\315f\262;s\361\202;;&\034=C9\3637?\311\002\275\243f\225;\'\374\010<\203n\020=)\300\365\274\013\320\301\274\263*y<\350\263\227\275\221\014!<`\210=\275\341=\023<\224\331\240\274N\323\262:\343\272\215\274~\264\237\243x\273S\206\250\274\375\305V\275}6\201<\261J?\275\017\031\205\275\003\354\251\274\022\224\241\275\303\003\271\275(\210\217\274\003\017\366\273\325-\316\274t\366\305\273\256R\351\274\3674\357\273\262*\n\275o\320b<\313m-<\023pR=,F5\274-\010\222<\241I*\275#\010\341=\n\031\317\274\363&X\273!<>=\377\325U<\245\352\005<\306\000\225=*\377v\275^\202a<\247\344\255\275\034P\n=\200\345\211\275\007\005\237<\225s\230=\r\205\240\274\342\374\252=\332\251\251\220F=#[\037;\362\035%\275F\373\255<\301\305\322={\376B\274\277U8\271\250S\363;\353\326G=J\311\020\275\036\274x;\\O\365\275\252\366
\277;\275e,\274\266t*\275\324.\252\275\347\357R\275\0217Q=\024\331\300=v \000<\376\206\262\274\236\036\370\274&3\234\274\277e\013=\256o\324:9\211\324\272\333W:\274\251\006Q:\302\326\352\273\367n\021\275\203t\310\273<\354\266;\372\357&\275H4\014=\3617\270\274\001\367~\274\',!;\304jJ\274\360\225\205\220\037:\255\230\003<\314\001#;\212\327X:\304\232\014\275\332>\257\274\036\373\230\274\023\0209=\333\325\325;\250\t~\'\003\273\014c_=C\323\250<\004x7<.\377\204\275\234\352\233=\224\3376\021\275\005{ \275\024\316\030=\357\346\241=\362\331)\275\315\"\004;\277\271\t\275\230v\r\275\216CU\275D\312\254<\236\026N;\003\'i=\253\3343\275\211s\026\275\211\257\360<\221\206\374;\236Y\377\270\271X\302\274t\252\345\2742\363d\274\177a\316\274I\017@<*:Q\273\347\226\005\276FS\237;\002<-\275\213wM<\025\253\214\275we\305\274F\315\300<\347\034\211\274\251K7<\333+\214=<\364\276<\312\301\203\274\2520\242:\307\223\232\273CM\247\274\353\253\342\274d>\204\274\332\001 \275\361\3363\275\232\353\215<\247\267\205\257\300\274\202\335l\274>\323\207<\246\300B=\037W(\275\007\344\220;W\316o\2749H\300\274\344\324$=Ywl\274\2123\023<\336[\355\274@\256\220\274C\343t<\3646\031\273N\242\307\273r\251x;?\366\023:\017\334\344\274\253 
\312;R\366\035\274J1\276<\227\002\006\275\350\367\216<\226q;=\270\263\211\274N\374\036;\270\370\014\275=\331\301\274\2515:\275(]q\273\313\205\335\273\303\'\025\274\314\377\240<-\344\002\275\331\224\t<\264\272\350<\351{M=\022\233\025\275\"\371\036=\253\253\031\272\265\374\303\274\221\255\200\275\227\2156\274\370\330\236\275\234\375\364<\365d*;\315\200\216\275\366#8=\352\321\275<\354\232\312;\014\267\205\274\277\002W\275\010\344P<;\003\001\275\250\237\030=\267f\302\274T\271\240\275\312\307\t=\"\374\234;\376\227\211=\022\025x=\373\247\230:\250\0067=,\220\260<\311\2705\274j\315\221<\344\302\365\273>\225\315<\326\246s\274\252\375\234\274\302\245%\274\240\321\000\275\"\331/=\375\320\202\273\242N\002\274\033\022&<\222\006\006\275O\242];\273<\350<\214tG\275K\r\216;o\3678\273K\337\204\274.\257Z\273X\303\016=\r\276\016\275\217\203y\274Nc\360\274[\203\253\n\245;\237\324%=\321m\004==g\236\274\324P\253\274\036_\234=T\214\030\276\277\001\224;=\207\374\273r\276L\275\006\200T\274\322\321\203\020y;\300\215[<\001\205\t=A\271)<\000\'\036\274^\257\026=\177\377E<\211=\265\274\037\315|\272\3550S<\177EH\274\263\271:\275\"S\203\274h\345\021\275\273\233\246;\360\363!:`?0\273\267\014t\274\200D\271<\302\345\325\273\300\306g\274\204\237\232\274\265\345\201<\002\363R<\372\214=\274n+n\273=]\367;\336\245b<\354\341\231\274I\344\214=\245\375k\275g\332\325\272\216\267\203< 
\371K\274`:.\274\326)\272<\303\374*\275\203\303\037\275\243\347\215\275\247\356g=\201s\245\274\025\245\231<\241\327\260<\267\031\240\274B\306]\274X\240v=\354\365\023=\262\202E=\342\251\232\274\235\354Y\275\033\261c<\204\244%\275{\333\225\275FL\220\274\302\246TR=\031\312\206\274\371\321\016=\314\027\217<\270\313\214\274\232\260\306;\354\024\240=\327\024\217\275\373[X;)5y\275\314\226\352\274+H{\275v\\j#\251<\216\302\267\2734\2259=\031`\354\2730N8\274e\353h;Rz\340\274\032(\323\274\202\344d=\013\016G\273\351\310\302\274\035\375\242<\200\354A<\243pV=J\347\216\274>\244v\2752\373,9\256\003A<\2325\336<\207\r\261=\377\3223=~u\305\272/\246o;jAo\275 \3505=\364\373\225<|(\275<[\315\013=\016\372n\273Y\2557\275\351\222F<0a\362\274\370\0263<2\302x\275 [\001=\365\254\274=\265h\032\275n}]\273\215\364\226\275\232\0036<:\277*;/\177I\272\344OA=_9Z\275\360\264;=\2632z\275:\0010=J\214j=\366\2261=\374\321\225<\216\372\027\367\213<\312,\262\274\233#\r:\274\374\210\273!4\001\273@m\341\274v\301\027;JV\254\274\334\004H\273C9.=\253\024\223<\224\231\376]:O\323k;\r\350\354\273\206\343\330<\277!L\274\373?\237\274@E_<$B\n=E\232%\345\2745\317\366<-\275\275\274\276;5\274\315$\034\275\357v\240\273}\2157\2758\347\251;w\215-\274&\007(\275R\260\326\274]\337T\274\205\272q<\244\032O\275~\343==8\362\035\272z\371\233<\276\016\245:\000\224-\275\3032\033\275y\340\'=R\327\216\274\354\305\017<.\276#\274\240\030,\275\t\033\307<\366\223\027\274W\207\330<\035\366$\275\322\002\017\275\026\300\311<\217J\010\275w\230\035=\205\223\t=\024\332X\273\2345A=\010\267\210\274\000t7=\272j\234=o\225+<\243\217\223\273\216\n\213<\205w\341\273:|P;.)\212\274\255:\226=b\366*\274\271!#\275\32466<\352\250\350\274\210\265\237<\222\264\337:wU\313\273M\000\tZ9=p\357\366\274\314\034\323\274\022Z\005\275\360w\336<\300\3577\273S\313\204\274\016\252V<,cJ\275\347\017}\274?\3254<\222*+\275\376\270\200\2747\243\372;\222\203c=s<\003\274\334\014\217:W\337\031\274\021\272\202=%p\t=kP[\274\240vK<\351\024\t==UK<\245\027\261\274\215\200\036\273\201\
343\004\275\345S=\274\205gJ\274\256\353\211\274_O\214<\236\220\362<\t\263\364:\323\306\206<\255.w\275h\221H\275\026\rW<=\344\210<\344w]<\260\272\255\273\006\'\251<\200:c<\344\241\327\273;OC=\236\003\204\2739\010\204\274,\244)\275\214\232\013=\3207\315\274\224\242\246\273p\225\r=C\376\004=[\303n<\202\257\246\274\205\342\340\272\256k\256;\312C!<\333\340J\275\001\351\206;\000\265\r\275\220\037x<\250\263\236;\037\020\007\275\341;\235\235\013\275\000\375\337\274\014\237\370:\325\331k:\211\277\203F\"\274j\254\303\274\364N\322eZ\274\0334\240<\013`\335<>}\303\273Y\212\032\274\'\325\373<\3024\031;\331\2343\275\372\003\371\274\225\254\247<\351\334\305<\301Y\365\274&\357\277\273\r\202\337;\267\370e<\233\335\003<\2276\036=\255Y1<~\3146\275\222\243\206\274\331\364\335<)\'\257\274\001\r\002;\324\024\\\274\273\224\017\275\232\256\240\254<\271\336<\274q\002\353\274\270\323\207\273\205\276\356\271 \330\300\274\340\261\035\275\241\215*\274\036\244-;,\317\254=]\261p\274\026\277\340\274\224\373e\275\2037\242\273E6(=~,c\204\306;\300\\\276;3\002\256\274\370\213\303\274M\370\032=\025\216\201:NGV\275\270D\270\273\007u\211\274\240\356!\274!r\215\344\274\n}\247;\0216\322\273t?\361<\\\035\211\2739\306\242<\341\'c<\232\344\247\274?\320\347\274\005\275s<=)\033\275J\246D=y\232T;I\030\032=e&\232\2748g_<[\377y\272\017 J\275r\313&\274\212)`\275\0048,\274\332\322p\273\013_\327<\235\\\3479Cz/<\0023~\275\366\017S=\303\335\375\274\227\362\023<_\351\017<\231\017\363\273\356@\215<;\337p<\263\203\007\273\310d\255\275\243\367y\2748@\340\273\013\216\260 
=\033^7=t\217\341\274I\212\n<\327@:\273\213\035\316\274\027g\214;\220t\254\274hW\216\274f2\211=,(\010\274\371\217\300\273P\312\216\275xR\177<\217A\273;:\002\266\274\212q><~\333\357!\273(\333\036\274\243\301^\274\260\024\343\274&(\325;\345Z@\275\360r\324<\376\n\206;\213\030\214<\206\301\001=\242\257\362\274\305\303\274;\356\006\237\273|e\025\274\251e\212<\032\0105\275\334N\244\274\010\370T\275\211\r,=\344V\355\274m!\030:\307\332\r\274j\352\270\274P\r\001=\306K\341;\302\335{\273\315f\242\275\266\t\213<*$\002=\226\035P<\351\3358\274\212\376F\274\022\371\257<]\037\0078s\322#\275/\354\355<\031\257\\\274\010\021\313\274N]\365\274\364\360\274\274\352?\222\273? x\274lE7<\242\361\014\2749\020\0239\\\373\035=,\223\003\275\244A\213<\004\355\373\274x\016\010\275?\023\020\274\305~n\274cI\215<*zA\274\007\177\304\273i\340\321\273h\022\256;\306&<<\212\033^=9e,\273\2378\020;\032\250\234;\324\240m\274\034\3061\275\270?\271\273\257\300M\273,\253a<\246\253\231\274\212P\200;\310\222y\275\206\267\200\274\242\226\021\274-\205\236:\004\246o\274\363t\205\274u\202\020=\274\025\322\274\272`\344#<\207\224\004\275;N\316<\220^\203\274\237\002#\274\347\226\302<\263%\214\273^\010S\273\010\352z\274\375\025L;7a!=\370\257\225\274R\375}\274=\241\217<\215\317\002\274\2675\224\273+t\250\274%\377l\274\206V&\275\217g\357\273\341\354\225<\204\032\013=\304\211\271;\226\331&<\312\006g\274\214\303!\273\034Ub<\223\030\236\273\001^\312\273\255\003\327;\303\000\370\273\222\003\260;o\016\302\273\347\376\236\274\242\244 
=\272\252\014\274cM\307\274\"\215\022\275\335o\315\274\304\327L\274\240o\037=3*\356\273\303\227G=\251=\342;g^\036\274:@\231\274\311\000Y<+\255\026<\236~\224\236\034\275\330N9=v,\014=\232\234\326\274m\t\371<\345\320\215\275\361j\236<\277\221\222:\265\354==\222\372\304\2742\341~\273;\322Z=\010\235\301<\224\232\206\275\004\037\251\273MX$=\027Dv=\030\315U=xZ\026\275\256S\234;\371C\004\274\275\376\005\275O\220\327\265\273\017\206\221\274\300\345\316;E\004\326\2741w\366<\000f\016\275k\'s\275\034\262\222\267\034\216\007\275E\342\327\273\342\277\340<\262\025\345<\313\377\232\273<\333\033=h,\323<\246\351I:=\017 \2745\366\026=\352mI\274\222\205\347<\267\006\306<\332\300o\275\\L!\275\014\rK<\246\245\\<\304\263\330\274\351\372e\274\226\342o\273\"\312\264\274\230%\204\2755\341$\275\355\360\211;\344\024X\274A\2018\274\371\375\t:\247?L;\334~=;\367N\275\274\026\334\205<\317\3428\2748\260\t\274Wl\337\273\325\004\233\274\232\020]\2752\025\"\273O\265\301\2744\223\234<\316|\266:\227RC<.\024X=\204{1\274u\363.\274\302tX=\361:\344\274\271\321\200\306\274\033;\237:\315z\324\273\367\246V\274\0256\227\275\265#e=\323\2675\275s_ct\177:\235H\006;Gr\372<\013;\005\275R\345\234\274\340\266\345

z\273N\201\314\273\021\307e<\256\024\326<\214\2145<\033\002\372\274\366\224\232<\230\275W<\302\034O;\036\245\027\274=)\203=\214k9;\2657\343;\310-\255<\310\267\244\274W\231\222<\036#\264<\377\344\025;}\002?<\204\323r=\340H\203;\332D\371\274\240\033\366\274\024\374\372<\256\240\027\275&2\213\273\032H\217\274\304\206\024\275\252\364\257<\234\276$<\203\001\033=\t\316\321)\306\273\263\247\033=?,\342<\013\001\023\275\004\212\305\274\262\206\030\274(H\305;\3346}\275Ju!<\323\354\177\273^9\352\274\340\216\367\273~_}\274of\262<\372\212\222\274\375JC\273\025=\010<\222g\035\2745\322\265\274\326_\242<\021\017\t=\035\315\241\273\342\262\375\274*3\000\274\320\331\r\274\333\261\351\273\212\031$;z\226\315\273\210X#\275\207\024Y\273\215\305\265<\022\236\030<_q\206<\020\220\220<\200\326;;\022\306g;\264(\250\274\353\256b=-;|<\233DZ\2740L\336\274\350c\331<\373}\204;\277\017\367<\323\311-\274:\344W\274\274\365\243\274:\377\221<\343sy=*\177\204<\312\2677\275\0373\022=\363}\203\275\032\346\033\275\351\307l<\317\2742=\312\nH=\366\273\340X\275KNk\274\0376~<)\\\002\274\033\312\013<\306\342\242<^\214\034\275$\347\230\274\260\335\373;y\254\'<\320\025\222\273=\022\217;L\355M\2753\rP<\254\372\016\275\331Z\"\274\373U\237;?(\033<\212GT\273X`A=\214\367?=\213\021L8\274F\002\023\275\245\304\315\274\264u\253\272\347\326\022=\rF\225\273O\234\327\274\302\001\245;\014W\260\273g\265\315:\244\274\211;\206X\006=(\305B;u\366\001\275\220Q|<\220\347\032\274)\276\236;-%\277:\312C[=;\026\314\274\221|\253\273\3056[<\303\271L\274X+\353<\376\030\301<\351\037\235<\237\207\274\273B\'l=\322\331u<\270\207\254\274\3166A\2742\310\035<\330\0042\275\357\364\271;\020\236%\274\377{\026\275/\316\203<\t\177\240<\223\352\217=\236\331\004;\334\344h\274\334*\352\273\236\244y\275\227\016G;\314R\2539@\256 
=\362N&;9\370\200<\216o/<^\240\212\274\263;w\274D\303\3459\026\234G\275\357\253\341\273\325\377\023\274\344$\251;\244u\277<\034\370g=m\205\331\274F\263^\273\226\0367<\006r\262\274T\271}\274v,u<\272\276u\275.\ta<\215\014\322\322;\027\310\207\223\274\032\\\003<\000g\253\273\372\302\027=\274\2667;)\257\031<\336\231\016;&\356\2728\3133\307<4\303\205\274\312\217\334\273\352\314}=Iy\363\274\356\026\031=\233\2035\275\270,\037\275Q\024\367\274N\353\264=t.\\\275\244\2341\275\037T\255\274v.\021\275;\231_=q\310\202=\361\340\251\275\255\232\306<\234\323\013;\303i<=\361\252\217\274\342\255\322\r\274s><\275\247{\266\274\342\006\221\274\030\335\366:\270\357\365<\224\326*:\212\014\255\274?*\002=1\227\360:\025\220/\2735x\232\274\243\230;\272\002pZ\274:\034(\275X\006G\274+\347\034\275ml\306\272\275\035\226\274\365G3\274\026\303\302\274#\370\203<\023\326o9\315\251\006\273\235\033O\274\351\210\331<2\264\010\275J\006w9\224\224\034;p\332\225\273:u\330<\177\026\202\273\246\267F<\312.\217\274\271\031K<\352\013\311\274\340\355\031=vxY\275I%.\274\204\324\220\274\\(\031\275k#z<\205\275.;\203\017=\275@Y\2507\234\2034\271\364\306w<\231\300\333;7U\233<\241\235W\274\"\204E<\330&\270\274\2751<<\035\240\332:&\255\210\274\251\020\034=\223O\222=\024\235\213\273\371\304\265\275\321\216\310<\302\247\216\274\317z_\275\243-\266\274\3157\035^m\243\000<\27539\302\274\261\252`\2749m\027\274^\270\002\275\316\314\314\274\3431>=D\036\014=\345ch\273\256U\037\275\353L\177\274\022\234\217\274~\35730\234\274=\371\010r<\340$C=\344!\001=\330\217\213)\301\274\026\204<<\363\372\342\275\005\232\201\275\303`\231<\214\201:\357<$\246\364\273\235\272\260:\335\225\334;\245\255\304\274\276\032x;\023\362i;\225/\326\274\270\3560w\226\274\260\267\373<\260\230\337<\331[3=r\315\252\274\002\3070\274\255\350\250<\225\264/\274\">\232\274 
.\364;?|v\275\270\222\016<\271\230\007=\220\2255\274\\G\016\275\224\203Q=\332R\272<\220J\021\275\267\n\013=\352\311%<@$\004\274\312>%<\007\213\202;\0149\'<\356\020\'\272xH\227\275\364\215\235\272]\214\003=\205\262\033<\017\204v.I;@|l<\246\314\322\273p\033\035\275\'\210~;\002t]<\261I\021=\252\375\020\275\\\363>\274\250Z\253<\237\177\035\274\243\355\276=s+\001\275VJJ\273\340$\311\274\242:\017=\370\363\347;\343k\346\274b\355N\274n\373\016\275\231\323\033=\3220g=\255\273\224\275\252\240\240<\312E\006;\273<\002\272\3548k<\276|\3754\275\365\306\273\274\226\244\265\274\020^\272\272\254-\352:\201\016|\274\245\264\371\274/A\272<\270:{<\004\345\244\274a\332`\274\363\371\350<%\333\375\274\374@\271\274\204,\327\273\3308\002\274\364U\217<\204]\205\217<\307Q\214:\314X\010\275\300\373\0326H\221\336\274\"\244\'<\357\004\315<\250\236&=\010\202\224=pa\270\275\224\312\246\273\257\025$\275\014\326\340\274f\3072\275\346\273R\274\321)\360;\336\004M;\254\203\234<\373\234\277;\311`m\274\244X\2106)i\262\216\274\310B\321\273w6\035;\223\177\246;\330|j<3\237g\274o\367C\274\342\004\177;\234\330\364\273\322\333\230\274F\"\344;\215Fj\274\344^\261<\002?\n\273)\335\355\274m\361\241=x 
l\274\330\007\304;b\355\271;g\211\277\274F\350\013=~\274\004<\330\337G<\376c\241\274\274\271$\274,\342\303:[*\344\273g\367\335<\260\245\247\274ve\245\273\\\373K<\3269b<\036\367\225\274\221\342\207<\215\225b\275\003\212-=\270\030W\274{_O=J\210\306;G\244\375=\306\260,<5D\236;\203\225\217;\230\215O\2731@\245\274\222\225\201\274\017ha=J\226\t=\340\202o6P\224\231<\261\360\007<\220\202\231\275\356\277\013\274\340\226\007<\373\177\253;\260z\375<^h\215<\224\315\371<-R!9\025\362$<{\360V<\253i\205;\201\001\301\274\022\\\261\211\031;?\202\264\274\3209\017\274*\"\341\274\241\217\224r\275<\033\245m:\005\327L\275\030zf\274\370\364\327<\260\021L\275J\337_<\245\002L\274K\010\214\274\037\236m\274\007\236\263\274\024\266\220\274\376\376\030\274~C\204<\271\204\330<:U\037\275\310\261\013<\255\216\002=\332\033\0079\tj\021;\371Ou;\261<%\274\305\031\230\274\316\321\250<\343\250J; \252\356<\017\302\216\274Qr\270;\274\221\214\274\337\322\265<_\227\261\273\276(]\274\260\032G\275x\312\247;\234\376\025\275\271\256\227\007=]t\205\274\216\245\216\274\301\230m;\325#{\275bmh=\206+\223\273fM3=Mi.=\205+T\2755\214\207=R\276\344<7\203\346<\371\377D9\314;\222\335\034=\374n\265\273C\205\262\274\na:=l\351\300C\274\257)\207\274%\005\223<8Q\005\274mO\310\274\302\261\274\274\347\200#\275\r\252\343;\374=\271\271\315@\r\274\375I9\275\302\273b\273\232\324\001<\304\216\246\272\030\234\263;\300\000l\273\305\352\350;B*\2138\026\346\214\273\343 
\024\274\211\253\276<]\252\326\274\326\277\215:\3608\254\272\345\3622;d\000\326;\000&\227\274\004\230\202<\311S\032=\"6\371\274\322\352\232<\350\037_\273\362\364\017=\342\317\021\275\301\351\252\273\264{/<\006)\315\274t\022\264<\001\330\265:X\374\327\273\\z5\275\330\3568\275\347\246\336\274\347\204x=F\230\016=\235\204\022=\240w\227<#\234O=\250\030z\274\246\021\230\360\275\346F\001\272\2535`\274O\324~\274\346OQ\275\203\333\023\274\222\234\024\275<\255\240<\010\031\330\032<\014\221!\275\016\270\371\274\206b^\342B&\375<\305\334\'\272y\340\303\274\317\377c<\372\036\207<\323v\034\275w\212q<\r\227\236\232\200;\002\243\377<\334~?\274nn\035=\233\3661\273HR\377N;H\264\005:\222\236\227\274\237g\221\274\205\313K9MX\233=\234v\241;\317\254\017=;\270\"\275\340x\352\271B`W\274\313\276[\273&L\014=\233\323\"=\007/\203\274\004\231i=\341\214x\275\306\336\022=\321\031\017\274\207;\027<\034Q\243\274v\305\010<\240*\226\273Hb\214\274\360\351\n<\014\003\212\274\257\237]=3\372\304\274\266\264\356\274\354\177M<\244Q\213\275\332\273R<\321V\027=8*\006\274\226N~\275\203\271\232\274/t`<&\205\313\207\034\274\363\345\016<\207\234\267\274\000\306\261;\215\205\3579\265\321\002=\t\234\016=\353\300\303;\016\262\377;\026\013\305\273\010o\321\372\302<\327\304\215\033\244<\034\306\364;\354@\261\274\322\023>=0\211\254<\321)\034;p\363\202\274&\264\020;]\001L\274\216\002\234<\302\\K\2744\r\251;T8\230<\233\377\260\267\206\350^\273_\032@<\215\377\013\275\222\353l=\254\026M;\014x\276\274Gy\247<\300=\211=\rTP<\277M\031\275\036}\341\274E\327\254\273\254B\000\275\'\273\233<\201a\300;\330p5\273\003n\226<\212=\353\273\222u\016\273<\345@=\262\n0\272\363\244\032<\307\224a:\367c\026\274#\017\375;se,<\032\327+\273\316\235A\274\017=q\273\332\031\331:\364S\030=\315\254F\033W\274B\020\025<\211\304\241;(#\351;t\243\215\273>\207\342\274\243F\213<\356c\017\275\252\265I\274\030\334\201\2732\262\022<\323E8=A\177\211=\274\213+\274\361\227G\274#f\334\272\377\343\026\2753q\n\274W\303\035=0\277<\274\350\017\376\274\276-I\274\256\2
71g\274\210\324\017<\261\023p<\256\254\265\274^b\274\274\243\307w<\264\361\007\275\025\210\r=\004h\313<\263\354\032;\013\202j=\233@T=no\241\274\024\3046=\224\375\310\374:\365\331\237<\033\3357=\357\240\274<\344\316\223<\243\005r\273\\2\237<\334\035\337;\205\211\327;{\361,\274S\300\376;\221\273\031\274j}\337;\324\003M=d\016\253\275\037lu\274x\006^\275\370\335\301\273\241,\223s\275\320\256\006\273\343\2505=\343Hg\273>7&=>x\201<\201\243\003\2755\233K;U\323\245\274\323Tr\274\013s\274\274q\016\264\273\221_y\275\220\177\227\274hu/\275o\002\271\213\273g\321\335\274\334\366\335;E\221-;<\210\374\273k\303J\273\'4e<\314\303V\274\215\264f\275^\0234\275\335\234\r<7\250\222\272\005\3157<\307\027\230\274\250\035\006\275\324=\002=\375\357O\275l\340\333\274=\202>\275\345\260\305\274\262\242\223\273k\322\331<\271\332\373\274,\255d\275\216\210<\274\244&\320=\t\377\311\274\331\340|=\244I\324;\025m\363<\000\272\216\275Gx\376<\222\0278=\031}\251\274\216\275\321\274i\316\270<\335q\327<\367\241\334;r\254\'=ah#\275\300\371\257:\203\207)=\374Ym8\3421\267\274\2520\363\273\375\0269<>mV\274\346\r\241<\033-a\274\274\361S\275\304\005\035=\246\237\247\274Zuk=i\213\241\274\366\314P\274f\206\273\274a\347\021\275W\014\224\274#\350\001\274TI&\275NE\004\274\313\270\325<\271\374\"=\305?\343;\351\276v\273\005n\272G\245\274\304E\216\273k}\327\2733w\225\274\016\244=\273\271\217\001\275;\372\272< 
\342T\273q\262\372<\262m\347<\305\213\215\274\321\317\277<\314V\263:\340\032,=]\r\242\274W\021f\274Be\361<\323\253\261;\332\212\203\274\340\0323\275\340\355\225\273\006\304w\274\342\247?:\234Y~<\331\306\n\273(\237\021\275\227#\221\261\273\266\017\355\2745VI<}\177L\273\313Cm\274\335\217$=\026\2517;\302H)\273\370\257\205\275\374\240A\274\333\014H=\353\020\254\274\262\226a\273\302\265>\274g\250]<\257=X=\370\355\226\354\345\274b\277\017=w\277\"\273U\240\220\274\353\024\301\273\rF\344=\033^\020;\377\255\201\275*\331J=\024\027)=\301\341\t\275\372\036\337\274\000\260\034\275\251\022\370;\\Py\274T\032\376\274Ip\347\274>\336+\274\333\232y<\270\317\321\274D\237\"\274\331.\360\274\001\2254\275e>Z;6\233E=\'\035\321<)\364\352<\t\026\205\336\274\213\002\234<\023\035\327<$\035#\275\372T\037\275\323\2647;)\250+\275\223;QC\r\275\256X\003;2\301\323;\026\330l<\377\305\245\274\341\251(;\353H\006=\0221\330\272\232{{<\303\332#\272\257\232\237;\341\326\016\275\254\221<<\252\206\362\274q\304#\275\244\300T\274\326\264\271\273\310|\020\270\240\277\025<\345\244:\274\344\317<\275%\240\325\274\372\272\266<\345\315\313:\002|\361;\276\300\206\271\250\357\342<-\267\267;p?(=\006\342\350<\362\364\325\273\335\rN\274\305K\271<7\254\236\274\352.v\275\251\n\211\2749\037\202<\275\037u\271\272\201\217\211\273I\340<=\033\342\037\274\364\331\033;\303]<<\350\246\261;w\260\"=\330\000\242\273\036F\331<\027\004\311\274\036\355\017=\230\340\222\274\013\034\361\273\256\335\023=\245\255\225\274P9\031\275\\ms\275A\315\244<\350\351g=\314\337_;-2\007\275A#p<\367\324\005=\340\311 \274\030\351\356\274\001l\326\274\255\010\247;=\'\347\274\204r\013;\001\361`=\260\204\304\274\020\203-<\323!-\273\367A#=\016 
0=\255}\036;\266B\246\274\235?\310\273B\004\240\271\326O\026=\246\241\331<\275l\2329\020\317\336\273\320\300K\274\206\273\273<5\031\237<\233\247\027=7\277:8\225:\236\317\273\3423\214\274\220\345\352\275Q\230\331\273j\267B;\023%\004;!\345\206\275\004\216\006\273\274\004z\2735\014\r=\037]\026=i\255\233=\212GC<\024\006B\273\342`#\275\204\037\371\273A\003*\274&g\365\273\3030\363\274\2761i\274\341\031\313\273\006\224\023\274\221\372M<4\372\236\274:H\363<\240\033\256\272\3243\205\274T\336\262<8L$=\261]\016\275\323/\335\273m\023\036\275\340\274\306\274toe< \343\255\2744Xy\2749\021\262\273\"h\204<\364\365\376\274\3175\204<}\177\213\274d\323\271<>\325`\274\312\300K\274\310\267j\274\243;\r\274=G\316\274^\372:=\2603!\275\204\375;;\036\010B\274\371\340\374<\010\220\323<\311\206\267:O\242\204\275L\361\014=5\255\351;\265%b\274\247\214\252\273\253LH\275d\240\213\274K\326y<\362~z\2740\205;\275\321b\260\274\262\006.<\371\267\246\274S\027\036\275\024\213\320\273T\037\201\274\332G\260\273$\315\212<\250\217\377;\037\001\252\274N\237O<\357\000\345<\000\020$\274\371\014K\274\247u\322<\265\246\216\273\240\200/;*\027#<\372\312\266<\234\374\274\273\342;\250<\023\223\006\275\347\022\220\274(/\224<\364\233\035=+\004\330\274\344f\305\274\203J\316\273\357\220\214\275\313TN\275dA\237\274\206\r@\275\335\265\224=O\332\006<\367\271\020\2742\267\340\274\277E\253\273.R\023=\201\247\025\274\3337\204;5}u=LH\322\274G;\251\273\322\222\004\275\206Y\302:\026\310$\273\275`m\273o1@\275\000=\343;\023\321U\274>?:=\014\275\307`\215\274/f\027\274@\002\240\2741\374\332\274\362\016+\275\373Or\275\252\030\341\273\032\270E\275\t\324\026\275\206\025\246:\021@\272\274L\017\235<%&\313\271\263\017\034\275\367a\333<\204\021\007\275\374$\213;$\216\036\274\246\255\363\273mS\261\021_\274\0327\023<\251\001\300<\"\314h\274\000|\332\274}t\336\272s\244+=\271\005\214:\364z\306\274\022E\223\275\340h\270\274\320/\033;\341\302i\274\200\255\314<\323\356\374\274Ck\244\274\334\303\231\274\211\371\241\275\344\t/\272\001\363\014\275\353
\365[\275\326\016\377\274\307?!\274\211T\377;\334\002\010=vC\t\275}+/\273\312j\247\274\376\t\036\275\235FB=F\207~\275O\221\377\274~\240\374\273x\334\034<\262\021\312\273F\373h\274\r\342O9\251\010\030\274\271<=\273\3577\r=\357FX<\340\251\262\274\022\260\270=l\204U;\377B\220<]\374\261\274\233-\034\275\"w\'=\370\273\325<,s\026\275\220\271\201=\265\230\025<\263\302C\274\035\306\300\274\337\031\231\275\255\213A\274\037O\022\275M^\366\274v\355|<:\300\362;h\036\325\274\214\254\253=\037\365\305\274\233_\207\273\335\356\210=\275y\025\275n\261\244\274\000\342+=?\"\234=\203\2462\274j%\277<4\331w\274\346\227\232\274Tj\246^=[{\331\274\317\300z\275\224\266w<\232I\277:\274_\t<\243\214\203<\357\260\026\275\331\310\264\272d\312\013\275-\247h\020\r\275%2\334\274^\300\214<\001\3627\274J\024\3429\327\320\377;\223e\326\274=\330\362<\360\316\210<(YS=y\323\221M\274\330QJ\275/\222w\273\270]\270<\331\034\363<\235@\003=\303\021\365:\325\003-\274(\203\371\273\303\001\034\274:\0314\275\323\306\027\274+\255\204;\200r\030\275\320B\347\274\232\352f\274\177\277#\272b08\273\313\363\225\274\257^\005\272\014\375\325\273h\231\005\275vY\221\2749\215\322\274Now\273\020\251h\272\013\376\273:K\223B\274M\0377=\263B\177<\371(\007\275\271Mv\273\343w\r=\254\2769\275\003\345\014\275\352\255\340\274\341\347e={P\315;\233\243>\274d\211U\274G\030\026\226\321\274\005o\003;7\334\342\274%\220\n=S\313\331:-\356\374\2747\007/\275P\347\002<\212I\371\273To\037<\372\323q<\377\0240\275\221\242s:\361Z\"\225\026==G\240\274\024\330?=\326EE<\352\223\375\273\335Z\356<\230\307\033=\242\354\322\273Jd\305\273\'\007 =\036[\032\275\372x\207\275\372\034\034\275\273\232\205={\tv\274\035a\002\274\033m)\275\340\032\225\274\337\235\343\2745=\251<\223g\002\275\213\263\014=\223J\220:\263\246I\274@`\004;F%\276<\321&i=\273w\235f<\266\351\243\267\254\273\t\216v<\230{\355\272\376>%\274\260\345\013\274K\204\300<\227y\260\272\312\230\000\275\310]1\275\232\334%:\030?\207\274\320t\226\273mB\347\274L 
\324\273\255\301\'\272\300\313[\275\367I-\274\201\270\364\272u\320q=\216\315E=\253\346\022\274\340 m\275\314%\260\273\341\nN\274\216\305+=\323-\026=\336\224\300\274V\341\025=M\014}=~$\t<\210\200\273<\376\252v\274\360\345\230;\215\\\226\t\274\177\2615;\274\201\207\304\274d\323\250\274\373\275:\275\"^\273<\037yJ<\2743!\2741\267\022\274,\214\265<\312\223\220\274^H\307\274\366\206\237\274\243\244;=\253\014[;\022\361o\274\233b\301\274\226\374\343\274\377p\200\177\274{\320>\275\227H\376;\270\317\220<\260\215\275<\252B\355;\231\'\\<\364\303\334\274p-\302\273\014\222%\2742\016W\274\301j\210;\217\222\346\274\322\250\244\274\017\252\017=\310\230R\274\r4\207\274\363\n\230\274\213\265\017=\002\321\370\274\016\313\204\273\327E\203\274Q\022\241\273L\351\251^\273\345\331\027\273\002\026 \275v\'A=\237\203\037\274\366\032\024=9\312*\275\342\321\212<\370\2221\274:\3103=\340\245\2459CU\257\274\\I5=\014\350\311<\020\314\363<\013f\317\273\273\270=\273\366K\210;i\273\337\274#~\347\273e\025\230<}\350\311;\346\214\306\273L\215\322<\253\205\331\274sP\353;\217\301\365\275L\373$<\267\\\313\274\254\242g;d\017\271\304\017H\275DU\342:\274rP\274s\274\r=\244|\252<\363\276J\274z\031R\275\373\305C<\262\336$<\334\226-<;\352\320\274\341mz\273r=\\\275\177\034\307\274\367\240\020\273\2505\312;Pq\273;VNv\274\2679\n\274%b>\275\337[\253<\260\276\005\274@\367_\273\224\337\003\275:\246\200<\356\333\256\253;o\2012\274_d\220\2746<3\273\233&\t\274`YF;\241\226\020\273\270\022\214;\227\330\342xa<\222\273\305;\357W\305;\302\367\027:!EU<,\212R\275\005\373\240\273\255R\240;\"\323};I\303\231\273\347\357B\275R@N\274(\242*<;uF:\373\t\007\274\260G\202\275E{\320<\357}w\274W\311\217\275(]\362\274@\251\322\2742_o<\320\376\202=$4\203\274\355\277\234\274i\223\243<\367\330\014<\2748\306<\212\002\023<\214)\036\275Y\314\267\274\357\265\257<\371%\312\273\374i&<\206\t\241\273\017\247u;\274\370\034\274;\237g=F\2425\274\224\023Q\274\300\r\252<\351\013\005=#O^<\225%h;\202\005\234<\351\334\264\274\311\231\367<\376\257\316\274\215\376
\237\274Wa\360\270:\231\355\274\257W\035\275Z\344\206<\002\0223=3\304\000\274\244\202\340\036U\275h\330\013\274\032F\231<\027\365\246\274\273S\263\274\032\300\n=\321`\241<\203,\310\274\017v(\274$\320s<\312O\304\274\023\346\355\2747\240\220\275\364d\013;\363\rC\274\355\352\307\274\337\261\n<\344\245\345<\356\337\233\274\313\247k\275\n|\374\274\033\2761\274\357\230+\274\362@L:Q)\245\274[-=<\230\226r<\306\344T\340X\213\260\273v%\263;\351\342\035\273B\327\250\274e\311\273\274\341\212\020=WB\313\274\006\241\236\274o2j\275\271\204A\2746M\257\2752\036\001<_\375\360\273q\000w\275v_\323\274\220\314)\273?\215\001=\020\001+\275vo*\2754W\005\275\034\364\332\273O\261\367\274\032Z>\274\030\375J\274\205M\321\273\306\3345\275F@\262\275\017E\225\272\300u\241=\005\257\006\274\000!\305<\351\221X\274\306&Z\273\2757\214\274\020]5\275\216\232\310< .\332\274\234&\026<\274k!=t\327\217<\246Q\024=kGm\274\306\364Z<\017c\350<\246\304\206\275\2136\323;[\344p\274$\230\233\275\200E3=\262\233y\274U?J\206#\274\022\202\260\274P\355*\274\213E\213\274k\257U;P\237\272;C}`;\333\034S\275w\372\020=\2661T\275\273\247p\274xS\323<\335\221\027\273\270a\235: 
\220\303\273\335\227\230<\271\242\371<\037\242c\274\205\337\251\273~Z?\273\350\250\277;Y\240\366\274\305\024M;\3628{\274\266\363_\274\216\271q\275\031\r\303\273\014r\372\274b\2150\275*\332\244=\027*\r\275|T\234\274\320B%<-\335i\272\233\215\026\274Ug%;\356+\013=\363\217\300\274\254<\222\273\006o\004<\374,+=og\271\274{\244\333\274\022\245\n=\361\336\235\274\320\222*\274\255\320\261=\233\327\210=\363\005\253\273c\323\273\275\307\2732\274\347\367\034\325U\274\263&\215<\372I\n\274\371\254U\274\206O\013\275\232\303+=\360\371\007\2741Z\217\274\020\337\275\274\020{\024\275\356\020?\275\203]m;\266\265c\275j})=V\330h\274\177\365\211=\004\222\233\274?\016e:b\202\201\274,,\222\273!\0375\274\200\'\002=\317i\365<\352/==\02539\273\336#\032;\3321I\274\0055\013\274\331V\345\274\030As\274M\337]=\007\330\033=\265H\326<\370\347\006;&\330|\274\343\302\242\274\317j\020=\026\372\265\274\370.|\2741\\\371\274\037?\033\273\221Z\370<83\206\275\344\017\010\275\036\377\220<\370Z\200\212\275\003P=\250\r=\310\244\2729=P\3575<\034-\020\274f\233\004\275\364\323\327\274\234\325\263\274\310u\341<<\210\243\274\311q\233=\203^\274\274\327\314D\274\312\314C\274h\213?\273m\273T;5p\223;\270C\374\271\366\235\241<\177\272 =\260\025\207\274\266\216\267\274\234\323\037=T_c:\335\277g=\310\211\025=\r\257\336\274rf\236=\247\241<=\220\nB\274\244\005$\275\016\025j\273\356\336\007=]\306(\274K\035\361<\211\031^<[\2606\274=\274-<\013\236<:\n,R\275\030\222\"\275\326\373/<\177`\036=3\222N;k3\014\273\263\026\305\274\036w\247\274\363k\024\001\243;\327}\026<[Lc\307\016\275O\277\n\275*{\002\275z\0300=8\245\373<\302~?\273\347,\362;\263\233\030=\007\036\'\275\230\310\316\275{.z\275\344V\255\275E\327[;\017\230 \273Y\363\263;\204z\024\273\260\314<;\025\321\327<\372k\270:#yY=\033\'\234\274\334L\270<\247K\321\274b~\033<\317\250\211;\221\314\254\213\352:\371\362\371<& 
,\274\330\270\3419\270\302Z<;<\374\272\"\002\241\274\000\252\352<\225\360\214\273D\331W\274\300>\216\274\021\254`\274\363\350:=\345\366\376<\274g\004\274\201\006\261<\232\215\266;\326\354\267<4#\036=\211\037\354\2740\354I\273n*\312\274B7\'<\235\212\017<\247E\035<\216\201\211=\234\241\003\274\370\004\014=\235\r\202\273d6y\274N\024\247\274\275\215;\275\3659\226<\355\220\242;\342\320\222\274\314WA<)_\"\272\214u\272:\241\2013=\372\326\257\2744\337\336\274\004\2743\275{K\260<\005\027\341\272t\343\263<\375\205\355\274\324li<\236;W\272>\335\201=\216\341\261\273\234\216\022\274\216C\245\324\265\274k\337\206\274\031\"\246<\204\345\213<\023\3076\275\327\376\311\273+[v\275A#d;;<\242\030d;K=/<\210\3037\275\245\213\000\275\233\270\263:<\034\326\274z\035\254<)\305\005\275K\033:\275\275\217\026;T\3456>\016T2\274)\204o<\3631\235;\346\310\245<\002\273\213<0/\212\274\272R\230\274YP\006\275\376J\017=\034\357]\274\357\225:<\216_\033;\261\007\025=\177\324\025:\215u\260\274\254\263\330;\236R\301\274\257\360\000=\023\364\235\274\241\264g<\205v\216\274\273J\000\273\021\205\210\2745->\275K\236\276<\000&\204\274\316\205N\275\316\250\331\272\207\264\261\274~\3104;DA\235\274\254\376\265\273\212\352y\274\207\275E\274g\372\217:\030\036\222\274~\237\336\274\243\3263\275N14\273s\031\253;\026\020\267<.\235]\274\256.\255<\007X\264\273Qn\'\274I\231\\\274Q\254\251\274\211}\301\273\334\002\350\275\222V\003\274\220\032\014\275\276%(\275\014/!\274\372\271\203<\237\244D:\346\017W\274\241\355]\274\315\335:\274=\267\322\274\024\267*\274UO\203\273|uM;[\017\220<\334\217b\275}\227-;MD\350<\250JI=\"!\202\274\241\236\275;\272\320\266\274u\000w\274\025\372\007\342\274j\224N;\302\255\357\274\360\205\322<\007\233\031<\266\244\213<\236\261G\273\201\222\\\274[D::B\275\364\273\325\024\257<\025;\334\274\350%r<\212\310\360\272e\036R:\374\330x=\353\270\246\274_}\007=\256\024Z<\374\216d=~ 
\002:\233\370\274\273l\254A;&\337\310<\354\341\362;\303V\260\274\353\021\332<\300\267/=m\035N\274\347\024\225\271$\253\343<\220\201]\274\204\241_8\266\266$\275f2B\274\316\033\224\274Qf\037#:\301`@<\210A\016;<1C\273[|\006=\256\035=\273Ch\266\300\344\274\352u\243\274\2033\243\2733\022#<\310\2318\274\267\310\267\275<\315\2209\004\204i<\214\036\374\274\246\002\214<\243D\001=P\317\312\274+\273m\274Z5\230<\254\241@=h\202Q\2738\211w\274\261c\311\272\230&\375\274\324X\005-\207=d8@\274\202\220\234<\357\257\377\273v\327\352<\001\265\367\274\241\270\262\273\215\361\306\216\274\327\256_=\340b[\275J\233v\273\326\\\267\274\317Q\037\275\241\275v;_\303\001=\n\304V\273Z\\\253\275S\366\353;\036\002w;\240\321b\273)\225\223<@W\005\275\334\276\224\274\267_Q=U\244?\274!\311\313<\204K\265<\205a\007\2750\213I\274^^\245;q6\205\275\315\334\000=\326>\033\2759\030 =y\025\004=\243\330\034\274,7\212\275\371+U\274\014\'\205\031\367<\260e\021\274\234\357\310;9\224o\311<\003\304\251\272\200\340f\274\226E,\275\232j,=\334\232$\275\0131O\274\343\213\235\273\204\to<\177\301\035;\003\'\002;\272*\031;\333~i\275\0301%\275\362\302q\2743\332\374<\343\177D<\177p\224\274\014\016\305\272y\"\262\273\023:\325<\237\326\r\275\'u\002\273|\366\235\272\232\334G<\354\023&=\244\314%\275\302\023\367<\221\235\265\273\220\307\225\274X\240\250\273#\036\256\274}\215\223;ag\226;\320d1<}B\207\274\222\232\251\271\003R>:\324QH=%s\304<\217\014\221<`\307\221<\245\034C\275z\304#<\335\026\233;%\r\230<\322,\027\274sSF\274\320\210\221:\020\255\256\273\024}l<\0141S\275\232I\222;\301Cd<\222\252\003\275\202\357\240\274v+\006\275\340\254\031\275\3246\005=\347s\252;\"X\3309\352!\273\274\rL>\275\257\314\253;h\307\355<\310\206\r\272\307\301N\274\254~]\260<\246?\377\274B\261N;\374\261\246p\202;+;\353\274m\207B\275\"c\341\275\334$\367\274\201\327\225<\004\321>\274\365\2224\273@\364\227<$q\227\275j\301.=A\335\200<\372\315\356;\273\336q\275}!\312<-1d\274[Tr\274\242\021z\274\346\3112\275!\2746\275\225w\301\272N\265f\274\365,\314<\300\257\205\274
@o\000\275\364F\351:\223\325\377;\247f\340\274\\\347-=\331c0=\247\023\203\275W\234b\274.)\363\272\222\326\002\275Z\333\005\274\032\245\354\272\377)/\274\342%n<\340\034\365;9\202\310<\335w\212\273\037\262Z<\371e\006\273q\260\016<\2775g\273\n*\244\274`b\226\273\225\366\027\273\205G\210<\267d:=\203\312\002\275l\177\264\212\356\217\275\317\005\004=\275\316,;\003\370\276\274\215V\327\274F\256\242\274\247\374=\272G\013\265\271\370\317\017\274I\275\227<\261\244\226\274Q\205\004<\367\362\336\274\246\375\247\274\252\354\330:I\361o<\376 \017\274\354\347\233\273Fv\206\274\217]\231;\316\351\323=\366p6\274\026\311\010=\276\223\263\274,\343\004<\004NG;G\343\023=\357;\330\274X\037_\275\"\332|<3$\372;ql:=:\327X\274Vg\322\274\270\260\317<_\266 \275\225\271\2459\357\270J\274\366\221\2257\270\306\201\274\034?\253\273>jA=\275\000\260\274G\004\377\274\001vO\275\242\262\351;t\277~\275\310\353]\275\324\367p=WB~\274?\354\301<\222\322\245\275F\347\211=\305\031\220\274r\221\245\324\273n_/\274V\254\274\274\252\306\234\274\363\317y<\362\265\202<\037\331$^\235=\026\030\247\274*\302\251\274\334\245\377<\026~\212\274\314i\032\275A:\001\275}A{<)`\374\273\375F\002;\204\360\316\274\244\212\252;X\214y\274\225\223\206\275o\343\361\274\222\261\212\206<\231\007&=\016\270z<\367\235=\t\342\017\272\236\324><,\231\025\275\321\2208k\275\0038\315<0+\001\274nD\036:\354\\Y:\236\313y\274\372+\252;\031\0255\274e\241\263<\216\207B\352\233\273l\332=\274Y6==\014+\014<\252.!\273E\362\247:\336\361\204=FS\201\274\263}\321<\333\030\3219mq\002\274k\361(\231f\274\262T\036=\216\264\224=\254\016\010<\\\035\030;\343\275\246\273\361\201\250\273}Hx\275`@\\<=;\\\200\233\230\274h\332{\274\333\303\254\274\361?\213\273\213\277\000\273Q\213\024=\250\340==\027N\333\273\024a\226<\266\240p\273\231I\320\027\274\317\303+=p\037\"=A-\246<\006\t\331;\2431\036\275\335hm\275`-q\273/\030\010\273\375\237V;M\002G\275\200\033\024<\276\266\340\272\306\331<\274\\2Z=\017k\036\275n7\377\273\3437S<>\247\345\274=\312\335;e\034\235\274\254p7\275\373{\2
77\274\257hE;\226\342\322\273\023\316%\274\256\214\035\275\272\347U\275\205\235\'\275M\230\211\274?\302\352;\032]\257:\242mb=jp\226\274K\325\310\274\211\336*\275\2130\347\273\334\035J\274\321N\212;\373*\234<\216\360\226\274\373\241,=\016e <\331\253\242=\364\301}< \245\200\275vy`\275\t#\010<\024X7\275\335a\256=%\273\232\274\314\376\202<\r=\254\275\260\367\336;7\252\274;\"^\366<\355 \267\274d\224\241;\373\235/\274\254\354a\274\345\350-\274w\363\r\273\340\201\271\273\313\004\204=\337O\233<0\351\024\274@\345\014=\373CD<\214\270D=\310\322\"\270\341\336_\275>\310\n<\224(\027\275K\206\247\274jVG:|Q\243=\274O:=\320oP\274j\357\246<\355\027\214\274\007_/\272\364c\267<\34756\274_;<\274\000\234\254\274w8\270<\232\313}\274\367>\254;-\224G=\017\343-\274\020\314\360;N\357\266\271\234\302s\271\2122\345\026\275rl3=\224\353\344<-\250\252\272X\026\361\274\344#\003\275{\274\215<\025\261\327\274\227/\346<=\004\014\275\3609|<\245I\321<\261~\022<\021g\236\2720\336\207=\276]\275\274m\ro=\001\312f<@\026\371\274$\036\254<[F \274\025\254\335\273?o(\275C_\025<\355\236K\272\346Y\321<\305\234K\274F\026b=\177 (<\276\262\037\274\371\256_\275\221\341o;\331mf:v\nC\331\235\274!l\267\274\334G\255\274\227\001\346\003{<&\003\036<\251\233\0319D\020*=.\327\223\274@\272z=\2116\257<\314Jo<\237^\355\274\260<=\275 
\2359=\223\0207=\343\262\306;\277\204b\273\243O\310\274\"\0210\275N-\207<\r.\267:\004\276\204\274\377\265t<\314^\206=\277-\003=8\031\021\274|\301S=\370Uh\275j\0101\274K\\\204=\005k,=\315\\\236\275\031?\206\274\215\255\027\275\010l\357;\303\230\250\273\350:X=\310\2507;\300wS\273\375O\266\273\317\352U=\336:\"<\246\211i\274\222\030\251<\250,\217\274\211\270\017\274\000\224\255\274j\213\347\2744\306\353\274j\354\201<\323\317\263\274\370L\226\274D\264\325<\375|\335<;b\346\274\334j\034\353\274\332\224\213;a?\305=\246/\204\274\247/F;\267;=\231\321\274\200\342\365\274\303\007\326\274\227\'\225<\235<,\032;\274m\032\177<\373z\026<$\367\335\273\032\374\266\273\2317\206\274\336/\340\273}\004h\273J\351\375\274\330\302s\274\005\360G\253\274\266C\246=\222\200\226\274\230sB\275\251\361\256\274\223\014\364\274c\361\001=GK\006\275phZ=\233=\370<4[4<\245\357\202\274\276\314\037=t\212\246\274\240\2222=+f\026=\000$\264<\251\301\022=\301Ip\274Q\304\264\274?\340\004\2742\217\363<\347vQ\273\337\2742\274\020\274F\033=\274\266\016\275\343\327\374\274}\370m=\253\333\332\274?50\275\026\253\206=\324P\360\273\353u\233<\300\207o\274\312\013A\273\320\260\n\275/\302\226=*\265A\273(\302\341\274\217\246\004\274\030\007L\274\233\275\014\275)\364;\274\r\376\003=$`\024=\203*O\273\362\001C\275So}\275L\002\035\274\021\336\311\274\243\243B\275\213)7\274\0364\220\274\035\320\021\275\027b;\271x\345k\275\324\241\026=\317\020r=P\324=\275@\004\200\274\215\002C<\034Lc\275E\336\323\274\236P:\275C\343s\274r@\267\274a6 
\275\343\312\257;\306\275)<\247\212\210\274\332`\232;\270l\240;\022d\037<\212\344A\275\213\232\014\275\363\026\035\274\026;\221<\007M:\275IL\225\274\377\0160\275\206\3260\275\023\371\216`\252\274U\352V<\372)\230<\275\304\024=D\231\310\274{\212\004<2\371)\274\"qu=\325\266\266=Q\375\327;\350\305\244=0s\227:\204H0<\207zB<\010\237\332<\356\266\022;\005C\000\274`\366i:\201q\310\274\233\2518=\216\r\275<\0348\271<,\306\326:\235\241\373<\255\006\311\273\245R\374:`\307\020\274\254\277l\274\225\274_\275o\010\214\274(\222\014\275~I\222\035<\275\263\337=\274h;\223\274\363\307<=\273!c=\004\244\342<\344,\240\274?\263p;cF\272<\210\233a<\372\376S\274w\340\003\275\265<\315;\202\353\350<\250\267@=\005\331\351\273\313\347\361\272\305\274<\274\224\312Q\274( \274\026\255\212\274\231\005\374r\274\230\336\235\274\364Z\216\2739d8\274\0249\247\274\372on\2747[1\275}\335\024\275a\302\303\2735]\323\274\350w\263<[Hv=\035\325M\273JV@<>\235\222\273\301[\374;\341\321\240\274B6,\275\266\\j\274\354\036\353\274h\216H;\212\363/\273\313\202==Mh\330;\336\221\315\274\254\201\220\275\3519\345<5YB\274\017#]\274\262C\233\273\020\236A\274w1\007\224<\333( 
\274\330M0\274z\322\026<(\353A\274\"\005@\272\324\242\312;;pA\275\242N\032\275w\223\270;\314\005\020=r7\342;\247r@\275l\241\375\273q\254\365\275\010\025\216\274\006`{<*M\344\274\250\302t\314\274\362\237H\274\t#\035=ih\261<\003\240\004=\256\342\026\273\375\036B\274\016\006\033\275g\265\272\275\324)4=\376\035\305:\307\265\222<\274\036\226\275H\321\255\275O\311\037\2750\214\"=\304MD\272d\035*;\026\036y=y\304\001v\022\275\221\234\350<-\022\006=\314.\235<\2259\016<\211h\302\275\3748Y<\233\256\001=*\217\201\274\205\230\245Lr<6\377\342<\336\320\r\274\2073\037\275\016u\246\274u\244S<2\373\004\307\003=}<\224<\035\261\340\274\245\326\227\274\231]\326;l9z<\033\022t<\034\004%\274\000w\324<\223\307\344\375\273Ol\345\275\303W\200\274\257j\003\275\226\030\204;h\211\202=\026,s\273\033\031\377\271\256\307\t9\313\324\232<\010\033\211\273\333\223\221<\216\264\017\273\337f&\274\311\007\307<+\177.\275\354\3665\274(fO\275a\233\217\27523\021\275\247<\\\274\034\276\362;\255\261J\275\002[\321<\220\020P\275\035Y<\275\351\271\337\272%\2731=b\177^\274\232_T#\327\274P\3732<|\025\201;\r\\\016\275\321\332\244<\013\263\266\214\2731\307\341\274\330\334Q\273\n\226\317\274{_\036\274\375\237\021\274\372\244.\274,\267\006<\255\207\003=\301f3<\372\220#=\001\230\006=\376\261\347\2748m(=\337\223\251<\201_\302<\351UU;\037\223\257<~\200\000\275\007\363=<\353\232\325;\034=\372\274\263\346\227\273\323\022;=\227\003\273\274ilQ<\245\310\246\273\272\372\224\272\022M\276\274\n\247\222=\t0\005\274\0030\033=\331\222\341<\034H\217\275}2\340\2740\3400<6\223O*=`\363\263<\221Ao\275\263\371z=\343A\227\273\223\027\253\274$\316l\274\303\"\272A\244\2744\327\254\274\344\276\021\274\317\234k<\320v\236<\237-I\255\357<8\"\205\274\301\330Q=\002\304\372\274\303\217\342\274B~\365\274\362 
\323<\030\237\277\274\231\315\204\274\362\037\235\274\326u\354;\330\332\024\275\226\230\307;\3246\226;\300&\260<{{\246\274\001]$9\250\004\210=\334)^\274t2\177\274\311\3109=[,\235\274\332uL=\371\253\007\274\267\334\261\274\300\372\366;\327W\177<\017\330\216\2733\344:\275V\366\325\273;\361\3208\010\\\014\275\223\365\315<\351m\212<{c\336\275\024\241*\274\214\006H\275\267\035\243\275\330\263\210\274\224\214\351\2748z<\274\024\n\006\274Ua\363\273\342l\013\274J\267\036\274WZ\252<\221@;\272t\243\373:\274\357%\274\006\205\257;\370\254\325<\344\303Q\273j5\336\273\336\243\010\274\261\022\254:\231M~\275\273\260=<\277\030\341;\327\214\202\273\177\257\210\274\222\266\r<\223\021\310\274\226J\'\275\320\360\000\275jEZ=\030V\260;\0045\232\274E\363\005=p<\236<^\202\023<\302\022\007=\032\263 \274\206\231\352\314\003\274\033\340\031\275\2442\361:\360\333P\275r\222Y=/\243\026\274\273\211\265:\302\276\252;\t6\033\275-\000\354<\2225\305\274\300\311\277\213N\275\317\027\212\275l=M=VL\216\274\354\321\245<\215p\370\274\267VX<,g\271\274y*\203\275\243\245\335;\271\303\210\275\263Z\321\274\241\377\261\272s\275\351\274\361r\250<\304s\340\274\300\177\304\274\247\355l=S7f\274\"~R\274\217\227\224<\253\207a<,\327\251<\221\335\'\273tp>\275\226\270\024\274q\364\371\274\202\210\212<\230:\364\2746<9O\356\273;j!\275\331\334\034<_:%;7V\'\2754\\\032\n\302\274\231\016\"=h\317\327<0Yr:\031_$=\024\270\307\273@&\275\273\330\032g=?\220\227\274\277\261{\274K\235\212=\261I\311\2742\270\266<.U\036\275\233\237\213\274L}\330;\017\363\230<\037F\200\274\33057=\374\026C\274<\003\002\273\334\260r=\257\032\204\275\033\177\230<\"\244\004\275\232\230/\274\207\372S=\336z\362\273\253(\235\274\303\234s=\274\'M<\340V;<\232\212J=_\006\322\274\332V\272\273\277\215\354;\245f\265=\322;\"\274j\014\001=\n\276\353:o(<=\235\000\022\273t5x:g\215\310;E\226{<\"\010\240:g\360\316\273|\265\251\274\346Y\033\275I\225\347:Y\235I<\221\023T\274\261K\204:\2422\241\275\275\001\001\274$\033\026\274\373\010\237\274\037\370\253<\312\004\245<0.\376;\
302\334\266\274\342\244\013\275!+ =#\327\252\274\366\010\251<\275\314\254\274\255\360\334\274\'\0147\275J?,\274\375Y\023;8\353\350\274{\203&\274@Y;\275D\225\352\273\366,P\275\261\013\317\273\302L\361M;\370(T\245\330;\347\351\004\273\\\236\304<\371\013\316\274\370\275\t\275\240. \273\261?\003==\001\217<\366\334\224\274\201Ii=\035\211\004:;)\004<\325\370\023<\010$\204\273\025i\326<\324\317\2169\232\346C\274\'\323\244\274\006?\256\274~\347\274\273*G\342:\236\234\206\274\227\220+\274FU\363;2w\326\2742&r\274\005\3608\275\271\261\016\274i\210L\274\300\245\037=\275\224\005\275\276\313F<\003r>\274(iB\275H\001\217<\023\034E\275\006\213\\\274\331q(\275\201\213\305\274j\242\030O\207\275\333\203\345\274\201.\311\274\255\377\363\274g2\265\274\354Q\002\275!\341\014=\367\221\355\273\324\3214:\304\317t=\2625\320\273\231\007\033\275\341\202\264\273\264\320\226<\014\3074\274~d\264\2741\203\202;\037\017G\274X\002\236<\n!\217;\017\376\023=,\242/\275f\352\345\273\374N\370< H\352;\026\337\002\274&a\014=\020\n\325<%\341g=(\262.<\270\t/=w\277\251\274\270@\241\274>d\323\271(P\014\274\371{\225\274\010\224\254\272?Vm\275t}\307\2749)f\274\206\323\n=\271\262\215<\313 \256\274\020\021==\301\256m\274\244\326\002;2\"\370;\020D\327\274aa\275\2745\300\202\274\351\331\224<\024\254\007\274\214/K<\0208\333;\230=\013<\257\302\264<\344\360\320\276\274\315g-\275\031\311\022=\025F6<\275\256\363\316\273\257\3420\275\235M\034\275\346d\315\274\010\316\'\275\276\374\215\274W\026\324\210\273\311\027\315\332<\274\262H<5\343)=\244+\220;X\204\271\272lG\'\275\304\250D=H}\264<\227\267\350\274\201\353\014\275\332\232\224\274(PP=$\336z=\260\336\257\272\327\236!\273\265\331\242\274\317Yr\274\332\371\325\274\233\353N\274\215N;<\346\321\025:j\307\311<\025\364\303;\266 
\002=c@\241\273\\&\207\024t\273\307Qd;<\272\216=HC\230\274J\324q\274\302XJ:\0249M\274G\364-\275D\274\232\274p\343\013=\257k\025=\341\210\255\274\336\014=<\354\013\033\275\362{\3129\363y\333\274\037\214#=\240\\\216;\312\247\242:\255\332\223<,b\276;\230\241m<]\323S\275R\311\223<\\\004\334<\337e\t<\352\303&\275\2465;\274%\355\215:{\002<\275\3541b;\343\200]<\037\237\314\274\026^\214=mf\233=7\005?\2758\021K\275\262\375D\272$V\341<\327\213\027;\nG\360\272\031)\240\274IF\247\274\016\365?\275\254\200_;u\024t\273\177\245V\275\367PN\274n!\277\232\2747\003\302\272\203D\376;v\216\235\274A\032\261\273\2331;\275\251\3521\273\275\200\266\274\275\307c\274\255h\207;\223\263\202\275\020H\313<\273\r\"\275\343\275H\272\310\213h\274aa|\274\232\0376\275\274\361\013\275\371s\276;b\340\246\274-\264h\275Tt\251\274\3409\373;\361\024\202\274\013\344\221\274~|\216<\235\224\001\274sV\346\274\026\277\303<}\216\t=\027\330\025<\320\031]<\037\276\272<\263\322%\275*>H<\027\317\305\265\203<\273\n\267\273\331\374\274<\t\203\t=\001\3059\274\375\020\251\237\242\274O\336>\275\202\310:\274ad\377;\246\023D\275x:\214\274t#\034\275\203:(\275\362\236\367\273t\333\344\274\260D\257\274\376\200\n\2740\232\252;4\361}\274J$\241;\372\256\341\273v\203Y\274\371\303.\2735\357 ;Cx\336<\302\340\037\275py8\274?\260\257:0\244\304\274\256\3461\274\342\013\223<\304\237\332\273\354J\024\275g)\251;\316\251\307:CA\320\274Q,\004\275\005T\022\274\263L\311\274\020o\305:\202<\377\366\204\274Y\347\223\274 
\022\324\272\332\035\217<\365d\021\274\006`\331;\246\207\355\2743\322\037<$\377e\274\342\013\254|U:\302\325s<\217\351+\272\276\010Y<\333\021?\275\377\363\005\275\246\025\243\274\231\\\207;+\270\r\275\352C\274\2743\373E\274*\331\217\274|\277O=I\236\346\2731\332\\=\215\335\240\272n\327\353\273z\300\'9\372s\211\275\235\206\366;\301\005\250<\351_\347\271\217\241W\273FV\345<\320}\214\236\274\016\273\025<\007\266\372;\216\326&<\317\"\000<\247\007\345\274I\314\217\274\336\206<;\231@\235<\236\212.\274\202\252\340;\243\307\306\274r\314\203;\331\0140\274\257\354\037\274\020F\252<\206\363\025=\004P\335\274{\343g\274\215\031R\275\027\327\227<\366i\246<\247\202\025\275\022\003\365\273\"\002\206<\035Z\307;\225\257,\275\305\360\037=B\231\'\270\214\374\325<\205\250\203;\276\325S=\251:\322:\n\224\373\273\247\\ \275\326y\024\275\361c\026\274\260H2\275\243\277#\275\271\351 =\250I*\274\213\343x:\000\023>\275\240\310\306:x\237\206\275\247\033\365\274\223j\177\274<\235\277<\004\331\222\275n\270\341;\220G&:\226\256\252\274Y\276Z\275\342(\346\274](x\274\3632K\274r\000\300\273S\307\240<\337\263D\275\026\007\247\274\346\300\210\274\300\311\032=2v\222<\241\004T\275\256\342\036\273\002\337\366\210\274\270\316\342;\332,n;q1\234<\343MX\273p\335>\273\343\346\245\274\207\357\355\274\252\334\027\275\347L\220\275\327B?:\3777\004\275m\r\224;\000\240n<\273\327\306\271\367\375f\215=\226I\213<\330+\013\275$\227)\275\202^\336;\355_n=\236\257\367;\232\242\r\2754\235\333<\035[\234=)\224T\274k\270\307\272N\255W\274\036\246\257\275\002:\337\274{\301`\2745At\tU<\000\003=\274\234\334\363\272\315\006\340Q-<\360\005\342\274n\271\005\275&\205\232;\265\223\263<\037L\205=mr\255\274\023Z\256;\205\016\001\273\336NY=9\235\367\272\020\235\257\2741d\376\274\201\207\213<\245\3527\275\240\204\254;\335\231I\274:\304\263<\247w}\272\204\355\364<^\304\343\270:\001`=+\0149\274\237\027\032\333= 
SX\274\202\254\014<$\341\213\274\256\036\213\275\372\'\\<.\263\022\274\251\305\027;\353:\242\272cx)\274\020\2047\274e\026\231\275\n\035\002\273\014F-=\276\267\361<\322\201\333\274\346\316#\274D_\322\306\301\274\'\266\241\275QFx<\033\020\274\275ZnH=.\252\243\274\307\345\030=\254\365\204=#\"\230\272\225q\370\274\322U\350<*;\200;G\364\336<)L\t\274\255\346\202\274\346\006\007\275\242\364\276\275j^\022\274q\277\207\273P\266\324<\264\0014\274\311\023e<\301\2175\274Nv\257\274\r\322];\034R\367\273\026\351X\273_\032+=\204\202\257\274\253\230\002=\337\365\032\275k\275\276\274\\\241\246\267\302}5\274\3138 \275OK\002\272)\272W:\333\304\241\274f\032\037\275\203\215\307\273 L&<3$[=\351!\234;m\330e\273j\227\233<\240\206v\274XvU\272\341\227U\274\001\001\035=\024\253\206;u\3225\2751\300\005\275\350\230\021\274\251$\263<\241\321;<\325\n\367\273\r\304No9\275\220\300$\274\267\275\006=Y`\333\274m-*\2755\306-<\244\351\313\274O\263\246\274\343sn:.\032%=\250t\304\274Cp\221<\340\005\\<3oc\274[\311\r=\377\206\264\272\\\232\365<\032o5\273P[r\274\346\037\215\272t\256\214\275\227\305\010:@Pt\274QH\250\274\227\370\373\274\002--\274\236\343\020\274-\363,\273[\236K\274d=\005\273H\332\350\274~>\335\274(\250W\275\035\277Y<\262\302\217\273tW\271\273\020]\034<\244x\232<\211,;\275\361\311\010;\201\352L\273\001\246Y\275#\241s<9\242\005\274s\03339\003\321\230=\2735\";,\263\233<\223t\303<\322\356\177\274\026\351\350\272\332\033\257;\036\207\255<\375\216\023\2757\352\374<\301+\252\274.\210\017=S\276\"\274b\023\220\273\'\010\327:\370\002\270\273)\021C\275\031\275E\274O\004\350;\024C 
\274\223\224\254<\367l\333\2743P\203\273\334qN\274n\034\241=\355[\031=\360\016\223;6\"#<\306/\275K\352V\275\010a0=\254\233-\275V\372\017\274n\215\037;Q\325X<\210*3=j\315\246:\377!\027\275y\212\006\275\362\2072\275\340\205-\274\327P\242\271\377\332\253\274\211\233\274\274\335\365\331\274\3244\206\275\235\333\345<\206\300\037\275V\265m=5p\"\272^g\330<\007\017\276\274l\331\026\275\230\267U\274\337\206&;\340\366\314;X\254+=J\337\204:\254y:u<\245\010\253;\377\007\302:\"\007\244<\025\tO=\323i\030\274\352\250\366\274\024Z\204:\346\260e\274EI\375<[\310\024=\2533\354<8\233\210\273\025\203\215\200\324j<\343s\001\274\007Q\353\273\"\264\034\275\320y\305<\331\255\242;%\265\005\275\365\237\211\274((\"<\317\255\r\274\267\200\325<\224s/=S\244\021;yi\343\274-*\310;BJT\274\211\367\253\273o\'|;\362\367\"\275\203\366&\275J\303\332<\230\350i;\304\2500==\317\234\274\227\362(\273\307\322\236<$\216\204A\274\321L\232<7\307\353\274 \267+<66u\023\275_\354&:\033<\025;h\261\016=\200\371\304\272\274\374{<<\321\201\274\371M\254\274\201\310\250\2749\272b\270qZX\274\272\363\036\273v\232\345\274\253\270\211\274\315H0<\316:\244\275\220 \206\336\273\024iw\274\361\023!<\303\301H\273\304\272\340<\332m\225?B\007=\337\\\203=\207U\026\275,\363\261\273\246D\333\274\233\3440\275\372v\273:T@4\275\225b\206\350\213\274U\224{\274o-\272\2749T\272\274\n!\224\273\370\007\370\274W\234K;?y\n=\275\375\033=v\217;\274z\352X<\366V\251\274QS\023=g\204\204\273\366\3174;\255&+<\267\021;\273R\030\027\274\177\271\177=\214Q\010<@\305?\274j\261\032\274 
L)\273\006c\030\275.{\344\273\030ge\274\373\034E\274\243+\256;\210\361\226\274\031\0042\275\342\371O\274!\301\037<\210\345\257<\305\254\357<\367T\210\274P\000\256\274\372\rl<\224,$\274\217\361\'\274\213\200\311\272\311\373\004o\205\274\367?T\274\246\022*\273T\013\224<\2470\254\273\301\300\202\274\366\313\252;\370eA\275}*\017=\256\330\305\274\035\245\333\274\033\031\362<\221\027\310\274\371\021\001\275\231aA:\307@\205<\210Yw=\252r\355\274\345\227\004<;9\026<\245_\220=~\023\013=\255S\007=Q\212,=\253\244\203\272\3177\024\274`\335\210\272\367\261\010=\337\\$=!\303\320\273SU\032\275\254^\302;\243\262\222\273U`\351\274^\306R:\253\360\256\274J\335\254\274\347X!\2753\007\263<\362\261\277;y\350\026\275\200jZ<\246\361\302\274\233c\327<\200A\233;OG\021\274\310\316S<\251\002\334:K\n\202z\274\343\342\345;\022\333q<\013\342\245\274\242\0227;\306A\024\274\010W(\274d\360e\274\005j\"\275\001\231\255\274\037\247\311\273uK\264\274\321\214\352\273C\220\220\274DD\004\274>\243\254;\301\250\032\273:\225.\274\2137\212<\301\332\346\273 \205\003=3\260;\274\007\326\357<@\231\233<\372$\315:?\272\300;\310\"\315<\331\003\006\2741\276i=\317v\243<+.H\275F`w\275j\325g\274xC@<\246\030\255\273t\000y\273c\254;=\314wF=\025\r.\274\377$\351\274\037\360\270\274\216B{\273\266\335C\275\347\010\264\273\325\325&\274\330\2203\275W\251=\274\002\273\\\274K\356/\274f\013\372\274\2269,;\363\362\242=\326\225\356\273Yz\017=\350\357\010\274\377\035\224\275p\254K<\232\014\230\274\253 
y\273iL\230\275U\305\363\274o\006T\275:2`\275^[\020\275f*3\275\200,rY<\346}\276\274?\030\334;*\245\304;\347\240\325<\340\235V:\211*\036\274\344\302\003<\257\0354\274\232\014\375\273s\345\314<\215\362\n\273|\025t<\217,\017\275\305\205\276<\326T\214;\217a\313<<\271\213\274\351\263v\273\230A\304;E\275\350;F\352`\273\202\207J\274]{\213\274`\341\267=j\213l;H`\207<\2414z\273\231H\017\273,\277i<1`\177<\356\027W=A\365\377<)\244\366\274\371\006\341;(F\370\273\273q\375\272\246}\022=\254`I\275\034x\211\273<\345\010\275\354r\324\272i\322\213\274\212\tw\273?\334\240<\366\036\212\274|Q\226\272\022\226\013=\276\0175\274\035_\275-\224<=\352\222\351<\316g\235:\235Y\021\275d\240\016<\213Y\312\272\365\257\254\336\371\273\004\321]\275\333\304Q\274\234b(\275\352\226;=\007\230\243\274\302G\270<.\333\246\275i\030\233:Qt\305\273\201w\215^g<\237\227\024\274Yl\355\2740\221\027=y\014\355;n}\262\274\244g\026=O\024\357\274(D\274\274=\257\237<#\017\317\273\n\033.\273\221\200\223<\347\3051\274\036\244\007\274\021|\305<(k\253\274\273\3243=\330y\000=\273\301\001\274\221\027\314\274\200z\241\2737\003\236<\3154\013\274w\330\343\274\232v\026\275\364\240Z=\r\205\365;\264\355\345\274\367\312\240\273w\034S\274?x!\274L\031\016\275F\033B<\206\034b<\254\305E=\0351T\273HV\365\274\013\271\000=\374\016+<\200N\322\274?bW<\203\204E<\210\265\034\275\324\0043<\001\345\207\275\373\007\200<\214\364\366<(\364?<9\2012\272A6n<\234\374\327\274\334\234\023\274\023\317\205:\213\311\210\274\300\313\366\273(\021\317<\033\013\341<~\000\263;]\250\363<\344%V\273w\341;\274[N*\274\321\277\223==\0235<\216\343h;\030\310\312I\210\274;^[;\0312]\273\372 
\r<\306\314\232;\370=\317\272*y6\273\256[\242\274P\301\022\274\347\353\010\274\372\353\202\274v;\345<[\342z=\211\260\256\275@\305\353\274kKU\273\242\202\265;J\330\302;\377F\255<\320\2634=C\221W\274d\263\204\272M%B<\272\314/\017#\275(9\237;T\265\177;0\240\343\274\330\326~\274de\221\274\3761\327:\003\201\315<\375sx\273{\317\223\272\301\374\326<\355o\031\274\360\2163;\267d\201\274\255\242 \274\355\352#<\3445e;\304m@\274a\327\207\273>m\364\273\302r\212\2750\344\n\275A~\201<2\365\225<\'\177\315\273< \353;\r7\346\274\360\243$=\332p\202\274\010_\201<\263l\246\274\266\024\355\273y\344s\275\271\230_\274iF\005\006\274\203\207\0077\2348\301=]\310#=\236\217#\2745\3054\274\257\267\030<\376\220)<\r\222\003\275\344\212v<4\231\240\274\242|*\275\006\220\002:m\304\312<\353\001==*\274W=\354P\303\271@\253\235<&\356\235\275\323\224\320\272\200N\036<\024\301;\274\241\277+\275\253\031\356:\326N\222\275=\251T\274\316\234\023\275\247\002\300\275\340\242\304\273\313\304\326<\010\221 <\253\345\t\275\217\3221\274\005\"0\275%\007\035=`\303\250\274\222g\227<\005r*\274\335\311\247\273\332\204\245\273\215t?\274\177:\234=`\362\311\273\025\3267\274\230i]\2740\356\231<[\367^\274)\3236=\037h\004\275\336\027\330<\016\304\023=\325=\230\271\345?\314\274\231\265\301\274d\004\203\274\353\324\000=:\225\200\252\274\005\267U\275\326Z\210<\177\366\374\273\275\370%=\352\300+=\305\304\210\275X\240:\274\263\017K=\335\262\361;\017l\324\274%u\010=@\201\213=\306\262v\274\264\327[\274P\021A\275\227\357\340;\356\221/<\307\257\252<\222\223\255<\030h\025\275\237\201r=}\366\\\274\017\013\006\273\225\030\375<\004\202\253<\235\\\323;\217C:\273\225\337n\2758\0206=\376\265\214<\377\336\333\274\225\326p=\223eg<\t\002\317\274e\316_\275\374E3\275&B\343\274-n\237;\\\010\257:9mL\275\0013!\275\345\254\307\274\377\201\"\274P\365V;,b\227\274\246\227\002=\324e\013\275\035\260\303\274\236\306\214\274\014.+;Z\2032=\361d]\273\277\371\355\274u&\t=%\215\243<$\320f\275\241z\377<\273\203\247;\307\366f<\254\343\356\274\324\231\242;\356\356[<
\022\2624<\nC.=\330\237\021\275(\026\264\271^\004\002<\206\264S\274\000\310\304\311\274 J\014=V]u\275\027\033L\272K\364:\275S\215h\274G\277\215=\237\035\205\274\031\021\022>GLW<\230\3513\275E$\313:\217\350\202\274C\r\016;*\302D;\317\023\006=VPX\274F^R\275\n\327N=\350L\025\275\320\355\000\275\266G]\274\244\256\027\275\007\256\221\274e\004\005=7A\256\273U\177\260\274\204\302\213=\350\273z\274tj\344<\322\022\n\274\246\014u\274D\245\350\271\023\t\003=\013\031\243\274\014\335\354;\336\231\257;\361DW\275w\275\030=\277d\\;+}q\275aO;<\026ez=JF\222=\223\367\266\274\363\211*\275N\312\333<\341\354\010\275\206\2371=r)\356<\002\364\217=p\201\233\274\241\374w;+\343\365\274R+\r<\300\327H;\357\227,\274:\034\222\3726\274\337\374D:\346Q\253<)\213\270;\t\262h\273\265\030\002\274Z\362n\273\352\307F\275\322n\263\233A\313\274\037\247\250\274\342\354e\273\373\367N<>&\007\273N}+=\220\322+\307\317\317;T,p\274m\377\300iF\274C\357\001=\226\257\233\221\020<\226\370=<\253l\037<\364\363\242<\026l\024\274\272\264\002=\233J\010\275\325\341;\274\360\261\013\274\351\277\210\274\2102\363\274\366\003_\275\265j\266\2734\367}\275VU\021\272\217K5\2742\364\370;\000A\356\274\340\'\030<\324\246`\275\210\321*\273\035\204\313\274\210{\346\274.\250f\274h\0310\274\244\365\337\274\301\201\273\274\375/\204\2741!\302\274D\250\201\270\217\006\243\225<\305\377\244\273\227(e\275\"\255\344<\312\"\201;\267\341:=\321Dp=\357\2576<:\2232\275r\2564\275\274\232\216;FD\363\274\211\010\010\275\213\331A\275\213@\257=\270\363)\275\006W\304<\213\024\204<\276W\272\274\260\340\027\271G\220\'\275-\342\232<-\222:;\304\245 
<\376\016*=\3163\034;\201*\256\274\036\350\221\275p\265\214\275\365\224\273=\260{\311\274%y]\275\024\260)\272\213\376\201<\3403;\274`\333@\275\271\335\021\272\364wb=\340\340T\275\272\362\213\275\210vF\274\002wh\273dY\3479\373\216J<\034\371\034\253\200\275\366\237#\275\223D\013=\020\320\026\273\020J*\273,%\003\274\243\214\000=\344\362\000;\354\3277\275G`*\275\261\000\037\273Eb\223;\004u\353\361<\370?\021\275\260\214*\275\245\323\'=FH\307<\021\325\227\274x7\325\274\017\374X\275P\275\334\273{\372\301\273\016$+\275*`\330;~7\'<\212\240\302<\263;&=\302=X<6\001\206\273\024\330\272\274O9\313:\301O\306\274)q\326<\257\024 \275,\300Z\275\361\016\207;=J\003\275\252\357&\274\354\021\352\274\264D\001=\227ME\274\310\354)\275\254\237\225\274u\344w\273\330\226&=d\t\002\273\277f\217\274\376\262\360;Q\001\033<\324\221\023\274,lp\274\035\253\377\273Z\036\016<\372\177\242\273\336\342\033<*x\030\275\273!\361\274\350;\246<=|\223\376<`\307\010\275\360\305\355:\346\336\001\275\264\320\037=\005\004\025\275\246q\221;k\236\226\274\002b\233\273\344t\023=\351\370.<^\350\220\275F\273s\274<\262$\275\376\231\343\274\220D\211;\207<\270\273\263<\244\274\304\301\275\274$5n;J\t?\274\232\013\247\274\031\r\264\274\311\313\252\274\224t3\274\177\365\277\273\370p\351=x\330\003\275\247\032\340\274\\2\366\274,\372\307=Q\250\271\274T\333\365\274\225\026\220;\320\023\305<\355\346\\\274\362\207\333\272\322A\023=\3521\r=\026\324\007\272B>\247\272 
\037\220\2741\275\226\272\032\203\343\274\037\310\240=\370\221\002\273\362N-\275D\1770\275/I\260\274\034~}<\263B\022\2740?x\274}\016\002=c\262\371\274\\|7=\352\336\247=f\334\372\274m\2305i\272\236\271\001\275[\266&=\334H\367;\343\300\024=E.\274<\000\226D=\350T\203\275\266B\256\274\370\207F=B\377E=U\346\277\273\t3\n\275\323\006I\274@\345u\274l\r\247;\311\003\226\274\306\031\320\273W\020\254=q\025\267\273\013$\301<\t\244\222<%;^\274\243\236\247\274\336\330\217\274\252c\346;m3\376<\374\236\342\352\002=\252\337\236\273\301[\304\274\321\3556\274d\020\212\273:\n\262\274\023\204R\273\336\323$\275r[\270\274\305\331\202;\372\277\026;\311\332\005\273\301\211\374\273y\316\361\273b\345\013=\226\371\217\275\022mL\275\203qq;a~\303:\261\220\324;\231\223J\274\340\364\363<\237Ln\274\311l\252<\300\2742<\315\020\037\274\331/\n=\026a(=\326\377\026\274\347\277\021<\220\017\324\274t\007h<\t\'\236;7\263\330\273\327}\326\274\324\"\305;*\240\256=\202\250;=\000\006\216=/\261+=\357b\337;\021\027\245\275\323\275\024\275\026\317M\274\341\334 \275\251\332\354\273F>B;\323\3221\246\263\274\235\271\314\274\353\260m\271\251\222\345;i\257W\2746\244\013<\177\247f\274\326\250\005=\354\371\244\274\\\360d\273A\025R;&\013\215\274\267\360\345\274\027\026\"\272\225\324i<\257&\000<\200\021\240\274j\261\276\273\335 \236\274.\341\262<\212\275\004=E\230\016<\224\257\310\274x\324q\274u\350\332<\355\024\"<\214\0131=Se6\274A{\312\274}\314t\274\203\317\321<\371\230\243\272\262\364\370\274V\215\016;\007\210\220:\216z\364;\355\351\344;)\201\352:2\302\307\274\251\223\363\274H\307\234<\200\336\332\274\236\235\353;\341\036\n\275\013\241$=\246u\027\275\226\355y\275\002y\355<\007\004a=\246\007G\275\265\222\233\275\332\032;<(\004\315\274\315Qi\271\221G$\2750\365\230\275Z#\351\273\020J 
\275\372\022\347\274\343Uf<\020B\203\274\n\334\206\274y\234u=\030G\236\352\220\371\204\275\357\027\306\274\312X\301\273\252!\301\362\261\274\007\036\264\2728\177b\274Ho\312\274\225\277\001<9\375/<5\223\227=k\212\273\275\247\345\212\272\267\276\351<\306\311`=\235\215\335<\235h\301\2747@\362\307^\275P~1\275\351#v\273\241h\010=^\026\022<\220of\274[b\001\273\031\341\327\274\252 \245:\023\367\241\274\'\316D\274\367\372\266\274\224\263\023:\357\353\261\274\004\314T=\031\365\235\274\212\267\236\274\231k\t\275\272:@;\210~)\275\020`\224\2743\206\010\273\306y\355\273\274\007:\275\350\212\002\275<\006\025\274gA\006=\006]\330:\234!\255<\307\3143<\261\010\274\274\302\\.bv\273\037S\242\274\236\330\177\274;\223;\275\226\271\213\273\031\350 \274\226\212\337<\260\224E9\356U;0\234\t<{\244_\274\201\367\220\274x\\R<\320\351\013\274\020\302\223\274W\'\002<\370F\021\274\274\316\246\272K%\243\274\356\025\"\274b\3263\273mE\207<\001x\277\273\257\'\017\273\254\003)\274\207\221\r\274e\276h<,\024\006\274\277\211\306\274\025\302\033=\267\256\251;G\335\010:;\240\n\275%\320\315;~\245\254\273\332!\016=`3;\274\356\tq\273\232H\341<\030\241\304<\036A}<2\266\022\275p\r\264\273\312\001\354<\264\224\275\274p~\205<\321\316\353:\322\032\332\274\017\274\217\273\350\255\342<`2\314;\265j\257\273\004`\242<\360\361\027=l\265W\243;\221\\\376;\241\306\026\275\006K\236\274\275I9\274!\005M=\030\r\030=<\217\267;\221c*=\250\353\227\274f\263[=\204/\n=V\005\362\273\r\254\365\274\207\245\033\275$\337\231\274-a\331<(\226U\274~\\\023\2751\266\277\274\316\244\233\274\230zJ\275\334\333\033=\354\340\\\274\255R\366<\255\223:\274{\274\352\273\223\267\335\274\211\300\314\274\202\371K;n\337\267;\0346\263\274z\334\243<\257\241\332\273\267\332\321\274\300\216\2509:4\235\275\"\013f<\000\247\206\234$=l`4\275l\326\332<\006g\255\274\273\253m\213\275D\243%<=4U\274Q\370:\275\215+\205\274\357\337\007\274L\266\037<\244\245\261<\226\221h\274`LF\275\273h\013\275>\037\322;\3736\020=\250\002\337\273B\362z\274^\341\237\2757\023X\341\274\202\3
22\n=$Ls\272\322\003\364\272d\345h\274\243\016\342<\337\263\333\275U\215\374\273e\321\225<\347\217t\275m;\260\274}\226a;\344\247\234\274C\322n\274\246\250\353<\260\306\231:\323\177\001\275m\203\204<\370J\003;Q#)=\217\316C\275y\313\226\274\260\205m\274I=X\274\235\362\361\273o\007m\274\2213\352\273:j3=C\370\266\274\3719<\274\371\266\263\274\330\366:\275\376U\005;\033OB=\tWD<\203\3351\274\004\3140\273\037\213-\275+\303\035<\225\nt\275\353$Y\275\207\315\n\274a\232&<\230\333f\275\301\0321<\021\314\000\273\203\233F<\335\303i\274\233F7<\376r\002\274\307B\273\274G\204\237\274\223U@\275\364\276\216<;\340i\274\2610\224<\206\260\351<\347\200\371\034P\275\261{\355<\223\224\274\274\253\202l\274\260n\t;\323\025\336;{9\357<\014\205H\274\257m\003\274\356\226\270\274a(\021\275\251\035\374\363\274\357\3169<\t\203\016\275\316\030R\275\376\221\016<\r\027w\273\212Zj\273HqM\273[I^\274\313\030\363\273\022m%\366\272\220\266.\274\251\323\232<\345.\253\274\013\207\t<\356\264g\275yc\217\2749`\205=\020D\236:\014o\267\272+8\\<\235\213\276\274\201\334\277\274\352\377`\274\336\026\201\275\327k\270\274\003\275b\275\211D\214<\366S&<1-\255\272\013\271\007\274\340\2342<\311\352\035\273\375\271\310<\344\246\242< \277F\275R&\025\275\352\301\200=\016n\331\2746b\274\273\021y\256\274tE\300\272\333u\373\274\337K\244\273\371:\t\274\000\343P\275\023L\305\274\211\024>;\r\270%\275\263\236\323\007$:\020\341\004\272\212\334j\273]\324[\272)T-r\031\275\013)\031\275\206\024\006\275CpK<\257\341\274\2748W\343\274\235/\020;7\365S\274\007\277\205\274\373\360\241;U\255\370\274\3521\260\274\327\261j<\361u\277;\321\276C\254<&]Y\274~\373\223\274*\036y;\277\210n\273\342;\302\274\247\357,;s\005 
\274\027\264\"\275K\255u<\023U1\275\353\366\307;c\344\023<\034x\226\274:\300\016\273\250\316\270:\340\362F\273t@y\273\305Zk=\204wy<\233\312\334\273\264\230\033\275f\266J=\026O\r\274\014\263\021\275\000\000a\262\212K\300;\035D\335\274b\277l\274#oF<\262\343\r\275,/\002=HGF\274\235)S\274\26620;R\263\021\275\224\337\302\274W\025\345\274(\326\350\274\302\264*\275s\312\310\273ax\203<\310\276\256;\204jK=\301\311\227;\343\306\200\274\202M\031\274`T?<\356\020\370\274\321\261\221\273^j\264\274*\016<\274>\211|\273\3606\006\275\335E\332<\342J\247\274I\274\357:IUS\271oz\000\273m\031\274<\256ny\274\365\273\302\274\020\203\300\274\214G\212\275Jx\"\274,\202\215\274\266#\323\225\274W\2401\275\371\202\377<\037\345\014=\376M$\273a?\251\274=\345\t\273rv{\275u\371\363<\3257~\2752\013\346:\205\275Y\272\261?\275\274~\220\353\274\362\025\233\274\223\204\003\274\266\3507\275\312\\\037\275\211v/<\345\210\202\274\305\353y<\326\311]\332p:q\254\265[\361;\177\372(9\335\262\344<\352\271\261\2756{\000\2752\222\362\274\307\021\353\273\341v\366\274\301C\311\274\327\"\215\275+;\310\274\215k\251<\341\337\233<-q\237\274\237\256\326\274f\321w<\032k\n<\264\020\017\275&\252\201\2755U$=\355\264\311\274\2543D\274\036\264L=\273\220>\275\235\360\375<\246\354\272;\024\370\347\273\377\265c;M\r\305\273\r\331\362\274e%b\275\373\005\216\273\1774\255\273\245t\017=vle\275p\341\261\271TwH==n\002\274]\3004\275\027\343\217;\325C 
\275\353\356\022\261\271\315D}\272/@O<\204\303z\271\276L@\273\303\312u<\344\260B\274\241\325e\275\246\352\201\275\220\306\324\273\001\200\335;0\376\363\274\267\305E=L\3638=\355\331{\274\360\231\320\274N\273\010<\240\205b\275.$!\275,*\033=U\221M\274?\034\201\273\251\327i=\177\252\\\274\243\346\270\274r\375\252<\273\214b\275\340sz\274\270/\204<\305\256\021=C=\320<\030\3768\275C\276\034<\266\360W=\340\225#\275\334\341\203\274Q@j=\342\227\341\274\206g(9,{\261\274LA\220\274,^\200\020z\272\326h\034=|u\360\274d\265\037:\251[1<\222\260-\273\245\227\371\274\027\332\007<=c\031\275\021y\t\275\220\331\235\274\243(T\275\205\313\327\274\323[/\274\230\206x:\361\221%\275t\345\224<\027@\005<\244\000\233\275\344\022\315\273\306(\350\274\273\210`<,\3637=S\\2\2749\302\330\273\264\202q<\374}J\274\017\367\006=8~\231;\201C\205<\241V7\274\242YK\275lY\347;3q\263;L[i<\265\305\007=\354L\220\275\241\244\003\275~\321V\275\365x\033=I\306\014<\354\2322;\357\032%\275\202\317\026\273Bq)\275sW@=\371\217q\274\256\253\265<|\351h;\246#\004\275\3169\222<\3602\"\275\276\346\321<\255\034\304\022\232\273\217\254I\274fZ\334\274\241\232,\275\310\325d\274\261p\n\275\2421\360;]\327\340\274W\353\247\274\213A\205<\350\333n\275\301M1\255\272\373o2\272M\005\032\274\353\r%\275\217v{:\251\271\251\272g\n\335\274\000\271*=\033L4\275\342b\211\274\343\'\342\274*\357\024=a~P:B\311\002=\316o\032\275\303\314\314I\247=\342\271m\274!F2<0n\356\274<\\\303\274\314\354\001=\037\216\026\275\364\340{\275\265Z\016=\232ow<\035\351\001\274b\033\000\275\014\264\250\275\023\006N\274\261\342F;\332 
\002\275C\257\210<\013&\376\273\253\327\206\274\004H\275\274\343\304\036\275]+b\275\210g\327\273\2715\207\275\215\306==\342\343\304\274\306\236\274\274\031\244\377:}\317\262<\0041\262\274?\221\301\274\2369\231\274\200\0213\275\2131\246<\033\242\001<7\'\333\274E\372\253\274D\027\267<\024Y\220<\024\210\004\274\225\323\200=tw-\275aG\277\274\332\255@\275\252\223U<\006\010\236b\t\212=\207E\270\274\230\240\016<\243\377\027\275\212\236Z\274-\273&=\014\210>\275\374\037\251\274\352P\205\275\362\330\311<\"\243P=t\014\220\274\306!:\274);e\275\240\353\014\274\022nm<\231\325f\275\221\365\242\274\004\247\003\274A\2150\275\202\200W\274\217\r\227<\233\243D\274\236\375V;@\303y\274D\003I\274\247\332!<\361\301^<\375\302H=\001l\242<\363X0\275b\305w\274\274\243\231\275\177R\017=q\233\237\274\n\372)\275W&\330\274.\207\234\273j\277\'\275G\315\000\274\277\320\314<\260\273\325<\373\321e\275\277?\223\274H\367,\274\264a\355\274\205\344X\275\'\261e=\355\305w\274Rdm\275[\330x8\2250\311\274\337\365\315\274\026\362\021= \205\214\274\177\364<\275A\212\203\274\251\257W\275F5b9\235\277\025=\325h\273\272\376\n\336;>\252M\272),%\2750>]=kE(\275\200\222\022\275\177\327\306\274\370V\"\275\221\244~\275\r\2166\274\0044p=\034m\207\274AA 
\274\005\203\247<<\3731\275\336}\315\274}?\203\275+\326g\275\030\272m\274\000Q\312\274\341\025H<\0219\310<\254L\314<\310\366\"<#\306\261\274\017\220-\275\377h\r\274\245\257N\274\242\242Y\2755:\273\274\301x\371<\315\242\205<\251\320\2219\204\313\341\274\224\024\024\275\212\206\020\275\036\213\257\274\302\0072<\270\206\206:\224;N\275Jw\227<\360:\246\274\251\017\260\274d^t\275\315\0241\275l9O\274\226f\264\273\363\234O\272)\266^\274Z\346W\274\343\214\344\274\320x\"\275\352\257\223<\241\264|\273J\312\263:%\255\354;\246\221\216\274I\275\177\274XU\001;TiW\275\205\354Y\275\312\300\035\275\200$\017\274\357\000<\275\230\256q\274\264_Z\274K\240\260<\313\363S;\010\322\320\274V\204\332;\023\n@\274W\000\311\274\3013S\275\304\200k<\314J\355\274x\346\036\275&\014\250<\225\204\006\275\314e\273\274Ok\355\271\227l}\275\304t\360\275\255\233j<5\243v\275;}\240\273\241\021\243;\354\016\212<}\204v\275\330\346\376\2749\270c\274\2010\357\273]\264\267\275\363\327\032\275\375\217\352\273\314I\007\275?\025\353\274:u\206<3\255\036=\225\004\226:\250EG\275\0232\030\275G\351e\275\221\264c\275A\317\361\274\305\357\001\275\037U\277\272\260GF\274\307\3755\275\037y\264<\301\200\354;\373\370]\275\376^\213\274\\h\205<^wG\273\3615\016=\014Q\214\274.W\236\274\251\\9\275\"\302\025;\n\273\326:uW\235<$\352e\275\210\3019\275]\305\032\273\005_\027\274VJ\202\274\356-\215\275\221\350z\274\013\267c\345\235\274\360b\310<\"b\204<\245\311D;\266\362\306\274\327\017\264\275\330*K;\032\365\325;\327mn<\266\367\233;\221iQ\275Z\371\n\273\310\326\326<\242\031\357\274K\345/\274\317\026\215\274\014\036\211\274\372\251\t<\345\303\365\274\263/f<\023M\000\275\t\242]<\357aG\274\357T\271\274\224Z[\274<\243\251\274O\373<=\271;\303\274\334\255\003\275\nB\024=\260\222#=c\364\305\274\217\341\021\273}z\340\2728fn\273\220#\206\274b;\301<\237>(\274]x\307<\246\336\211\274|\354\021\275\023`\356;\026|\372=\202d\026\273\213\244C:H\2734=\214\230\"=\336\350\225\274\355&\323\2748\036\005<\212\274w\030\023\273\'\222\t\275\366\336O<\177N\336<\373
\237\345\274\247\226\033\275go\205\274]\375z\273L-\242\273R#_\273\254\362\244\274\025l\027\275\262\270\032<\212\242\336\274\2322\010\274\257\205}:M\357_\274\237\210\211\275\236\371\244:\003\0078\275\371\225\226\275\352A\206\274\021\355K\274\317\236\213;m\243 \274\214\246\254\272m\312\350\273o/\205<8\202\013\275!:~<\323Z\347\274\301\225\232\274\240\3060\275%\202\324;\344\235\357\274$\303u<\340\254\003\275&\351\212\274!\273\225<|\377\305\275\325uH<\221\334\222\274\3319$\274\262\024\370\273\014\270\023\275\364i\237\274\022\272q\274\357a\266\274X\245z\274\255\342\316:\n\224\350\274\302\345\310\274\021\016H\274SK\026\275\357+\231=\340\nP\274\030\205X\275\036\001\036\275E\225\227\274\235\344\316\271\301V\305\274@\335v<5\336U\275U\317\265;;O\207\274T\274\241<\035#l\274\210U\256;+\025\315\274s\257\356\273\004\356\274\275F9>\275j\214\010=\005:\315\273;\310\251\274 jc;L\203\241<\247;\r\274\266\3139\275\364 \350\274I\252\343\274>\227c\275\227\252\030;\357R\260<\021\315~\275\273\256\261<3X\254\274?q=;\364\025T\275\332\351e\274\217\331C\274\320E\310\274\247\342\200\274\317\310?\275\322<\\<\013\221\251\274\265\361\340\273\213\027\251<\035\225\250;6+4\275\253\264\316\274\020\333)\274\007V/\275\256<\357\273\230\270P<\334\004\237\274\235\370\'<\272%;\275\332\033\353\273\345\377\251<`\020\334;XF\225\274\303\254S\274i~8<\221\0219\275\2300\356\274M\021\r\275\227\360\243\274\336\273\317\273\243\222\371\274\003s\201<\037,:\274\310\000\312\035v$;\2522\206\275\031\206l\274\211 \\\274Hr\233\274|\233\313<{\326z\274\241\355g\274\347C\003\2754x5\273\035\204J\273\207\357\310;\rg6\274L:\215\273\206\004I\275\362\334\266\274\356Q\027=\'\227\235\274\274A@;_\032\241\274!\006\235<&n\240:\330\202\204\274\357\304\023\275\344\260m<\363\217R\273\247l\247\270~1\307\274\027>\346<<;X\275\036\237\214<\333jQ\274\027 
\332\274\003\323)\275\375\033\037\274\214/$\273\225/P\275\r\215\375\271\007Q\262<\367\332\003=\013\263)=\272>\242]\245\274e:\030\274\027B\021\275\025\202\223<&v\251\273\tHf;\022t\235;%@\337;\217\006d\274\221\302\027\274\331A2\274\337\313\262;\347\222)\275m\327L\034<\221\302B\2741\300\277\274i\253\337\273\357\206\277<*$Y;\251f\263\274\327\267\343\274\351!;<_\356\201<\375\237\321:\221L\223\274*Q\277\273\267C\037\273;s\373\"\274\364\3274\275\230\371\224\274\307;m\274\254\271&\275\035\331\005\275\014\026\000<2\376w<\007\324\n=\236\177L\274)\306U=\347\214\364\274\356e\031=\356\343&=\227\203\312\273\315o\324\274\273\251\363<>\007\034\274\220\341z<\322\374\222;\252\371&\274\007\033\036=J\272\200\275\355A\254\274\357\354\262;\276hS<\357,\212<-T\311\274T\324>\274\034\006v<\014\303\032=\363\020\241\274\206\335\003\272\036\240\031=G\034\032\274GT$<\201\023%\275\320@\253\273\370\200\202=\342\375\010\273]\200\034\275J\322><\026\367\253\275\004\014w\274\273!=\275\371D&\274\201_k\274\276f\t=\2141\210<\200\024U\275\342\377\t=\227\376\211<\300\327\007\275\247\205\007\273\0314\036=\006\247\331<\337\266\247<\243\'\r\275M\021w=\"\321>\274\306m\023\274i\313\304<\022e\013\272\360\353\255\275\306\370\265\273j\323\201\274~\260\034\275\234\277\274\274UJ\230<<\303\205\275\214r\014;\375uQ\274b\036\032\275\350\275\270\274N\017\235<\225\213T\273\371\324\024<\356\350\340;T\027\"<\333N\006\274\2468V<\377\217,\274\2365_<\327\002\004=\371s%<\313\2005;}\203\206:\204\0331l\275J\373\021\274\034\177\027\275\254\200e\273&\213\265\272S\007\243\275R\000.\275?\351\212<\311\0012=\350\005\216\274\3707\023\274\220yR\275\305g\212\275\2759{\274]\211G\275I\002Q\275\241\2008\274\333B\320\274Q\305\217:\361o\257O=\334\000~:5WI\274\252|\202\275\265.,\275Z\037\036\275\205\240\2629\231\227\226<\356z\t<\214\272\266\274\025v\033\275\225\360V\275\3050\000<\342NN<\233\254\005\275\177\030\212\273\023\322K\275p81=dr\020<\334!\002<*\333\035\275\007\324\205<)\326l\274\206\232\241\274\350i^\272#\350\'<\227U\270\275K\004\033\27
4E\256\377:;\307$\275\363)\311\274\351&\211<\367\026\314\2740n<<\177\177\215\275[\315W\275-\273\'\275\256Y\215;\332E|\274W2\005<\024\276\210\325\274\355l\244\274\233\217\311\273\316c\037\274\342\277\347;\370\022\303:M7\341\274\225\256\322<\314\357 \275\333h\312\274[HK\272\345\362\266<\363?!\274\016]\005\274\203\301\244\274(\273\350;\221E\223\275\326\230n\275}$\177<\016\343\205\274H\252\262\274]i\266\274\245\315\370<{i\307:V\233b;\267\\\014<\227\021\276<\272\367\366\273z\326g\274\252\343\245\273.\360Y\245<\t\205\027\275\227\214\311<\025j\036\273\301g\247;5\371L<\205\373\340\275\314\001\005\275 \036\031\271-\334\335;-\363\261\274\315\224\346\274\374\374B\275\321^R\274\372\005\251<-gi\2751\244Q=\\\244\001\275\353\370s\274xZ\244\274|\037\030;\233+\357\273\306\253\207\275\306\371q\2721Z\027\273\277s{<3\354!\275\235\200\327;\2258\014=\245@\321\273c\336\241;\364\312\002\275\331\321\'\272\211\\\000\275\335\225\211<\214\332\305\255b\275-\026E\274\000\350\237\273gP\271\215\224\274\364\022J\2743\010s<\2325f\274\324\005\267<\270\233\021=\273Kp\274\330\237g;`\030\236<<\350s=\371\213\025\275\037\360\222\275i\224\212\2753\r\244\2740\237\230\274\315\221\252\274\351\306X\273\356J\026\274\230B&=\010t\257<\221\232\264;y; 
\275\232H\256\274\353\300b\274}\024\002\274W\204d\274\302s\361;\025Q\364\274&\227/\274H\361n\275\343\232\340\270a\371e\274$*c\274\356\347\316;\365\313\030<\250\242\000\275\"Q\334\274\007\206\024\274\266\273B\274\277\227D\272\266\301\036\274\237\302H\275q\223\205:`\322\032<\005-\253<\016\356\333<\361]\362\274\257\267\212;\353\373\312\274M-\341;\3372\341\273\304\020\326<\370\234\351\274\207/J\275\334){\274\023\330\023=\220\323X=\235\334\304\274\035>\031=\271{\205\274*\371\016:\333\263w\274\247AI\274\243\231V=|\372+=e\263\240=\273\221\232\274\336\371D\275\001w9;\272Nx\274,\247v<\262\030K=\327\001m;\247.c<\347\346Q=\247\010\243j\200<\255\360\027=l\326\2316\275\316;\036\274k\364\277;a\005\032\275\010k\314\2746\375~\274\273\023\264\273k\216\032\274jZ\034\275\236t\035\275\357z\013\275]\270\310\274(\352\237\274\227o\353\2745\330\274\274\342T==/]\256;&yK\275\262\034\214\274\211\261?;\342\326\205\272?\351\225\274\313z6\274D\361N\273\337\207\017<\177\227\016\275\323\344\256\271L\003@\274f \205;\231n\214<\265\247\264;H,.=\255(\001\2752}\347\274\216\221\372\n\332<\310R\021\275\342e\222\272\033\016\000\275+>_\274M\303\232\274\006k\247<\257h\t\275V\356\216\274\3630$\273M\002\272<\026j\314\274\330+I<\351\354\202\274Y\014w;(\314\273\274\023r\204\274\237>\363\274j\334d\275vzX\272P\325\006\275p\324|9\335>\217\274k#\"\275:90\275\310\250\201=\337m1<~\305=<\305\221\231\273W\237J=\334Y\"\275\226\272\273\273\024?E\275\336)V=\002O)\274\213\003\255\274\330\224\346\2746\342\016=x\3254\273OF\023\273\313\t\321\2740\347\334\274\363l\000=G\351,:\230\014\006\275MO\256:\035\225]<\251jr\275~\326\260\272]\2645<+\250\257\274\245V\n\273\274\267j\343<\314\030\r=\216\272\213\274\025o\237\325\200<\0019\031=|\021\\\274\375\245\207\275%\271\306;0\370f\274\014\257v;kC\345:\347\357\234\274\233\247\244<\362\365c\273W\333\330<\276\204\n`\236<}\260E<\362\336q\275\266\260\007=\023@\236\274\317\005\225\274\177\233\006=\260\216\206\273!\257\367\274D\216=\274e\274\212\274\221>\007=\365\300\037\275\373\371\237\271\31
5\276\325\271\322\341\257=w\255\005=\370\323n=\246\234\035=\341\264D\274\206\r]\275\230U\242;\035\340;<+\3507<\023p\245\274h\234\360\274\301\253\340\274\271L\355:\r\323\264<3\307\315<<\261N\273Q[a\2749\351*\274\023AY<5$];\366\313\311\273V\006\362<0/\213\274\313\255\351;4\336\243\271\222\n\265\273=@(\274\357^=<(\327\222;?+\364\274M\362/\275\324\006b0\370<\302\302\022;\027\250%\274\334\320H\275=\261\275;\336H\373;\206\337\342;\230\0025=nEf;\022J@\274\227\265<\273>\2153<\201\010\t=\317\3169\305\274D\232\267\274\244\023\232=c\001s<\232hI\274\331\211$\275\016\244\345\272\212)%\274E\3136\275\377\301\276\274\327\341\330\274|\356Q<\207\272F<>\272H\274\343\314y\275\326+\261<\224S\311\274\256\244 \275i\207\032\2750\351m;\225\352\305\274\020\333\036<\201\322\254<-\203\227\275V[\355<\000V\024=V*\004<`e\257\274\014\203/\275\303\271*\275=\306J\275\376\001\025\274\324\201\354\274\322FZ=D\306\343\271[\272\004=\260\030k;\345\274\375;\276\352\346\274B]\267\274\021Wg<4\377@<\245\341a\275J\360\273\274@w.\275;F\235\273\2068\277<\264\260\303<\246\332\302\274N\263\206<\177<\206<,\267\016\274\203\311\007\274\\\223\316\273a\254\361\204=2:\333:\322\006\276\273O\t\341\273l\357@\272\315\t8<\366\331\212\274\240\261\240\274\032\265,\275?\254\223<\342\231\014\275{t\302\274\255\007\242<\201\217\034\275\327\372\214B\275\221nR\273A\251%:\035s\350:?\344\303\273s\341\274\274\332\267\206\274\301\370@=g\302\265\274qq\032:\270\246\302\274\260g\010;\220\214\210\274>N\003=!hh<\013\374\256\274\317\375\247\033\2749\032<\275\352Q=\275\2450\2739\016\251D\275t\301\354\272Y\002\314\274\364+\375\273\031\352n<\305\340\262\273\243\266l=-\310\265;\312\220\005\274\375\315\315;\216\204P\2755\200k\274\027}\225\275z\025\312\274\274\035]\274\032\242\320\272\241,\222<\022\264\024\273\220\271%;r\3006\275\2128N\273\212\330\264;M\237\255<\313\2343\275\265\021r\274\330\224\357\274\232\333{=B\"\222;\346\244h\274\355P\347\274b\3136;Z\r$<\354R\212\275\235\364\262\274\2062\354\274\364\307\014\275~B5<\256,\006\275\242NP;8\377\373\
274\255K\023\274\346A\256\275\354\367\204\275\273U\210<\271fX\275\335&G\2750\264\350;u\0060\275i9\366;B\211\350\271\027\027\010\275\255\353\253\274\t\001\244\274\025\376\312\273\033>\225\274h]P\274\363B\261<\374n\206<\200go<{1\374\274\030\332\272;\005]\342<\302\271@\275\020\370\022\037\275X\370\351\273\243\324~\273\321\263!\275\026\342\251\274:\371\034\274\246\227#=\003M\352<#}\027<\263\332\301=K\362\210\275\276\353>\275/q\214\272\032%c\274z\347\304\274\254\307\210\273;\303\016\275r\004\243\274\314\200\230\274\200\2458:\347-\242:R\261\230\27499\233\274\014:\203\273\246\300P;\320\267\254\331\220\2742Wl\274k_\203\274\033\362\366:\245\2057\275\271Xw\274D\010\330\273\274@\007:\275\331g\274U*\002\275\366\374!\274\204\351o\212=v\022\321\274\\\001O\274\212(\315<\003\026,=\264\014#7\215\256\233\273V\231\341\274\205\345\230\274\302R\351\273\334\001\321\274\243\212\215;[\340*=U\321%=\027\315:\273S\244f\275\372\242\262<0\366\214\274\226\026\250\274[<\234;Fn\266\272X\016\201\274\333\000\213;\204\227v\275\321\276\021=@%R;\217\277o\2743-\"\275\347\337u\274\300\371\035\275;\377\332\274H=\341;\037p\307<\365Fa\274\tQ\262;\02470\275\260\305>\273\007\364f=\\\301\256=\033\036\250\273\244\265\035\273\002\013@\274Y\036\t\274`\364\274\2746\230\023\275B\214=\274\325\"\2779-\262b<\207\2178\274\341)\000=\310\376\330\235\314;\335~\355<\003\317\362<\"L\022<\301(\251\273\232\306\371<\276\013f\275\023\325\024<\r\033\363\274\301yM\275\377\263\263<\264\254!=W\321\235;R\021\037\275\032\357\307<\362\242[\274\304\233\002\275G\361K\275,\230\267Q\255\275\370\023\036\272E\303\233\274\"\022\204\2736\303\300\274\366\241\226;\214\010W;\313=\335\274\255\265\336\273JbL\274\327Y0\274\360c_;$SK\274\340\301\251;\362\242\237;|\204\017\270\375\214\037\275k\013\347\274\"9t\274\243\033\332;\242y\006\273\211#\373\273\035\346W\274\225\236m;\360\355T=lHF;\243\323\367;\230.\3509\373\036\233\273D\216\303\272\373\232=\275\311P\307\274G\363\202\274\261\247\242\274\224\036\303\273J\367;\275\200\323z<\022\335\032<\022f\000=
\367En\274F\275\001\275?L\r\274~`\221=\347\300~\273\363E\234<\253\265\030=\246\021\210\2748Mb\275\270\340\234<\233\215d\273\324\006\214\374\240\273C\204x\274\233\231&\275\010([=>\304-\274\356\2142\275\257\374\264\274\363\320\302<\017r\351\274\322\356=\274O7V\2731\262\024:\030\241\2759\251\262\236\273h\347+\275\230\325:\275V\373\r\274\260N$=\003\377Z\274\311i\027\274\212\360\274R\010\036\2741.\036<\247z\005<\217M8\274\334\242\247\274>1\002\2757l\211\274\257\363\r<\030\313p=\3078\306<\253!\177\275x\366\033\275\235\231A=m\033E\274\334\316\027=8\272\222\274c\226\333<\235m\366\274\237\324KX\273\335\370\351\274\226$\251\274\325\344M\275\016\3575<\341\331\225<<\346o;\263IG<\206s\213=[@\376;\022\032N\274o_\332<\374\026\235;\270\342D\275\377Y|\235\324\274\253y\265\027+<\005\027\330\274\356\261\006\275[\014\236;\264\267\236\275\n@\246\274#M\225:h2\026\275\014\274E\274\245\364\014\274\313\t\003\275\232\312L\275\014|\031=\337;\322\274t\365\364\274\004\360\213\274V\207\032\274\357\360\317<\007\037\340\273\312E\254\275K}\300\272p*\263<\210~\335\006\224\274\323\303\311<\003V@<\311y\211\275\n\203-<\305D\203\274\375\274\306\273\270.\311<\376\000\024\274\025)\210\275\260sX=\207.\010\334\367\273\336\310\337\273V\224\371\273u_\253:\220W\005\2742\375\246<4\036\220\273\370K\275\274\240\360M\2736\351c\275+\2541WM\274/\334\030<\022\332\321<\327\375K\274y{\215\274\233\014\221\273)p<\274\257\317|<\347\0348;\313FD_\315\2745\347+\275\\\202\035\275\355\013\213\273\221\355\333\273\003\311\030\274\203\261\n\275\273\344-\2739;\021;,j\014\275\027!H\273\232\212\030\2751\217_;\300\3369<\316\311\310\273im\244;}P\223<\336\225m<\366\205\006\274\261\226g<\341\231n<]W\240\273\335\3362=l\277\014\275\316]\212<\003\003\303:\347\231\250\274\0263Z\274Z\261\372:\237\rm\274=s$;\022\270\316;\241\370\272\274\215\327\213\273B\250\327\273\246/\376;6|\233<\221x\032=L`P=i\371\323:\n\373\304\272)\024_\274\250\237\n\2747#\340\274\274\225\004\275\343\210\'\275\315P\233\274-\"*\274Y\362\013\275\202\322$\274\267l\003=\236\
201\252\274\363\214a\272L\367\016\024\275\324\007\253\2751\203\275<$\341\331<\305\210\236=\314pj=\177\374\255;\374\212\342\274f<7<\350\207{\274\274\014\260\272b\031\226\275\365pW<\265\353\204<\266\031\260<\220\250\020<7 \264=\273+\243:\336D\211\274\033\021\035=\'K\275<\337\373\231;\237\376\032\274b0\3739\365\346\350<\250\260\342\274\030s$<\363\353\211\274\306\372\337<\216{\002\275*w)\275\347\252\\\274X(\237\272\370\321\315<\202]3\272R\263\203\274Z!\346<~$\220\274\026\274z\274&\200~\274\235\2635\274\3437\256<\222\211?\275\254\210\215\274n\370\034\274$\031\013=\333\335\255<+<\307\372<\343\003\233;\262!`;Z\333>\274\372\351\024\274\034\302F=\220\310\177\274\036\035\205<\025\217G<\317>P;8<\325<\221\315\332;;\353\215\273y$\255=6\274\335<\272\227f\273Pf\323\2742\333\362\272\327\317\265\274\357\000\006\274\213b\267<\213\361\212;\241\363c\274\306\031-\225_<\362\357\235\274\000\213\203\274\367q\221<\200p\033\274k\305\243\2738\t\205\273(.\201\274\032\rm<\240Xi<\366\274\263\273_\032X;\'G\314\274\341;\006=<\232{\035M;(\273 
\274q\025\356\274R\367x\273\266\016=<\271\017\001\275S.\240\274J\3454\273\366\326\310=\341?\021\275\241\030\353\274\212\\\030=\022\204\243=\004RO\2749Kd;Y\017\014=\341\017\036\274\325\325\275\273\234\033\215\275\031Cb<\206\210\253\273\364\231\343;?\270\214<]\366\337<|\221\365\273.45=JP\372\274\226<\372\274!\226\235<\236\376\200\274\345\324\225<.\324\260\273\301%0=\234UY\274\013W\213\274\350\037\245\272\360j\211<\003[\232\274\253\207\322<\037\347\026\274\262\232\034\274w\034\"\273\250O\231\274\344\030!=\314\330\336\272\261\")<\237,\270;\256`\363\273\327x\n\275\266\220\305\275rQe\274\363\255\231;\027(\316\274\371\332\221<\001\233B<\006\2628\274\340\3121<_f\216<\367q\317<\030\241\241\273AY\315<\212\177\354\273\252\217P\273\367\343\016=R\024\017\275\212\211e=\317\210\217<\246\276\3359,Y[<\034qH=\034\331\324\274\272e\352\272k\303\307<\366\200`\273\275N\030;\226\253\304\274\234\246\346\274A7\240<\00430\274\3063E\2752\256\026<\204t\003<\365\340\341:\340w5\013=\274d\016<\203\036\257( \275\332\314\346\274\275[\335\274h\r9\273\276\236\316\274r!)\275\265=\373\274\177\227m;\013\204U\273P_\223<\t\2679l\274\264?C;>{\037\273q\314\343\274\204\354\207;p\331\031\275\274\256\351\274\320\367\027=\325u@\275\022\336\023\275c\273\001<\003?\204\275\2142\236;\357\375x\275\014|Y<\233\245\325:\030\241\303<\261HZ\275\333w\221;\310\361\341;\t\324\345;\300\350K;>14<\236=j\274\010%\004\274\202b\004<\272}\014;\312\233\355\273\206\372\201\274\347K\207<`J\266=;\247\022=\216\265\217\274\303\341\241\273L\344\350<\335\350j\0044\275=#\370\272\017\213\263<\305-\200<\306}\004\275\351,\000=\310\002\220\274\222\240\201<\372\224\023\275x\030\337\273aw\374\274u:\245\365\273\026jQ\274\263\307+=*4\251\273x6\n\274\034G\256\274\262\'\032=\305\2633\275fJ\243\274\342q\230;\253\373\305<\333\272)\274J\365\346\274\300\r\246\275\276\034\262\274\365\026\215\273\365\200\275\273w\010\245<\255\026\006\274\306\217\376\336H&;\000\266\014\274\002\037\272\274\360\225\020\244\005=\366\324u<\326G`=Q\324\";\322\373\201\275\326>\2
45\274\237H\026\274\364\221!\274l\341\360\274\327\036-;)M4\274\205\273\261\274\275\202\243\275\354\214\277<\310\030\335;~TC\274\270\007\010=\013\365o<\262\331x<\330\025\270\274\353\257\354,\273&\366\212=(U\025<\377\002b=\321\365/\274c\365_<\215\365x\274\2610\366\274\234F\231;\252\001\034=\260\276\257<\366\322\201=\331\277\367*b;)\343\321\273\237w%=\230.w\275?\2711\274qf\335\274I\265><\270P\225\273\231\236W\274\n<\t\274\023+\200\275{{\301<\352\177\275<\363yq\272\362w\271\274\356\221\255\273\000S\334<\262\257\231\274\330t\263\274\226\003\277\007\343\274\241\370\224\273\361\336\262\274\003\374\300\274\235\031\031=g\212\025\275\216r\304<\261\271+\274<=\257\272`\264\023\275m5\236\273]\235C\273\344\017\217\275FNE=\360\005\r<\tSx=V\227\024=[\371\230<\3215\362\272\334\2404\275\223\177\263;R\016\033\274`\352n=Z\014\266;H\372,:>\310\333\035\023\275q:B\275\254\332\036=:\230\031\273\r@\314\274\337\363\230<\230\034\344\2733\031\373<\262N\235\275?\027\313\274\256\326M\275z\300)\274\355$\307\274\270\234\307\274p\204D<\331\t\324\274\315\024\271\274\002\002\271<\253\275\222\274\253\272\372\273\322\346\034<^ZR<\310\317\300\275\274\007\027\275me;=\215\022\361:\357JY=\020\240\020:W:\022\275\007\0015;\266\373\267;\206\004\246\274\'\245\351\273\253\\\016<\177\207\234<\300ey\272P\245\343<\023\003\013\275#4\204\2749\347\316\250\007=\300\244\252<\356\225\346<\177\226\004\275\262\324\006\272\027\341\263\272+\"p<\213\2064\275[\224\202<@\215\262<>\244\222\274\023\332\342;\350q\303\274\303\375\303<}L\370<\256f@;H\252\304;\333\230\241<\232\240C<\242\353\235\272n\2761=\312nu=fZJ\273Z\364\271:h\237\372\274\t\364X=\227\\K\274e\017\013<\237\251p\275`&\000=\316\2520\2745\016\236\274\305>A\274\363\336)\275:3?\274\275\251}\274\270\206\245\273\376\266\314\223/<\373\036\2224\274~H\013=k\275\260n=\326\216\351<\026jB\275\370\375\014\273\013H\203\273\360\2263\274\340\243\2658\025\223\345<6\272U\272j\374\356<\367\331d\275\007_\177\274\241?\240<\321E\217\273\265\327\207=\247\377X\275F\034\251\274\217*\"<\034\
203\000\275\276\257\030\275S\0312<4\305\210\274\244\004\t!\274\001^\205\274\025\377\271\333<\003B\r<\205\376$\272\307\265\224<\007\374\n\274?\272<\273N\353\375;\207\331[<\n\023\341:\014\231\034\275\023\263E=\300f0\275\307c\310\273\344i\233\272\275\346\344\273+-\210\274Ug\020\275S/\357\274\005\270J\274\256\353\026\275\033\277\226;\247\263\244;\307\311\'\274\347\341\215<2A3\271_cL;\216N\224\273\0316\274\274\\\261\362;&\307\013<\233\003\244\273\213\200\354;\341v\265\274\207\362_\274\276\334S\274\231\\/\034\243\274\007\025\303;\302\t\357;\260\310\213\274\3517\227\2732\031f\274\376\326\017>\013<\231;\354\347|\274!\200\035\275{\t\000=\204\266\314;\003\256\177\275\371\363\'=Co\234\275_\337\340;FQ\256\274W\027\035\275*KD=\221\177\342\274.\003F\275\r\363\316;\370\312#<\206*\305:\221zX=\0107=\273F\304K\275\252V\215\273B\004M\271\235fw\275T\273\031=9\250B\275\3071K\275\325@\027\273\265@\300\273@\374\316<1c\024\275\204x\021\2715xj\274[\254\363\274<\030\355\362<|0\300<\313\231A;\364X\260\273\260\224W<\211\253\364;#\353,\274\352\022K;R\020\036=\202\177\034\274a\366\233;{f\021\275\251\336\t\274\014\350\213\274\217r\345\274xR\253<\271\027\233\273\n\314\n<2\253\236\274\213\234\336\274\203\277+;dxT\274\0212\335\273a\027P:\021\245\r=\322\342\374\273\347\002\251\213\273\210\202<\275\345\324\345\274\266\355\037\275`\331\363\273>\206\302;\"!\244\274\361\021B\275LoU\274\201\362U=\232\365\235\273r\256\267\273o2\207;\306\320\030=^\224d\274\"\311T\274\256\0000\275h#\002\275\3662\273\274\332\0001\274g\247\035\275\010J\021=\305a\326<\356\245\005\275i\264V\274\006[l\275\203E\034\274V\273\02698\231\021\274\000i\237\274k\264\022<\035r\304\273\\D$\274\227\210\206;\021\013G;\354-\326<\002jY<\007\177\310<_G\337\274\232\004\364\272\321\331\307V\273\273\340\335I=H\251\207\274\220p-\274\374-\213<\220\001A\275\374r\273\273\n)\024<\307\260G\275\346\020\215;X\352\032=0%\365\273\353\326\330\272d\013\254\274\324A\370:\271\324,<\205\277A\274\263E\021\274v:\301<\202p\034=#_\201\274\337\261 
\275\330.\030\275\231\177\217=5x\241\274\344\312\303\274\324t\252\27431\363\272\005m\314\274\367\226\240\274XC\202;Pv\234\273\001\025\227=\324c\013\274G\333\224<0\305\'<\014lv<\206\271\220\274A\313$=\234\310\273\274k\351\017=\265_\353<\026\367\347\273v\351\322\274{\271\002\273\213t,\272t1M=\022\306\372\271\034\264f\274\215\234\030\2753i_<\342z\222;\030\233\006=\000\231\245\272\006V1=\033\026\247;S&T<\007\2132<\3551\264\272\362)\325\274\026\276\341<\275\220\025\275\2305\206;V\033\242\274\0166\254\274\213\0323<\275\336\235;p)E;9\207\t=\224\347\214\274_\014\276\274\341*\000=3\220\355\274\354P\235<\024\t\331<\336]<\337t\007\274k\252\376:mw0=\303r\247;)\030\345:/\224[\274[\331e\273\321\210$\274<\345!\275\250\022\332;\226]\245\274d\226\301\274\0108P\273\r\260\276\274#\241\357;\214\034<\274\007\242\206\275\223\216\212;\"\377*<\277\177\242\273\024G\225<\335\347\270\272b\2279=d\253\321\3007\274a\324\030=\207\342\242<\265\005\352\\\275\326\3209\275s\211\\\274\020}\212;#+h:n@.\275v\332\026\275\355\301 ;\323V\"=Gz8\275JT\031<\251\2736:XW\033=\036\223\007=4\356d\275u@@\274\326\324\344\274\270O\026\275w\352\316\274%W\307\274/\210\207\275\301cM\275J\"\230<\347\023t\275\027\314\000=A=\300\273\301H\005\275\nyZ<\207]\021\274\001\277\212\274\216\245\035\274m\310\n\274\221\344#\275\027\207\375\274\330\016,=T\255\250<\027\353\"\275c\235v=\371\024\030\274d\301|\274\375,\n\275\211\346*=\225\0312\274\034\242\033<\302\202w:mE\351\272^\351\227^\273(\207\026\275\034\210\025\274=o~\274\004\231\'\274\326\271\275\274\'7\270\274\204\202,\274\265-\207=\212\023\355\273;\214\371\274\222}\325<\t\3261<\242\215c\274\306\305\013=P\354I=$\313U\274\234tb:C\r\032=\364gg=\340\335\351\272\303\341\021\275^\2262\273`Yd;28*=\256\215N\273\223\351\032;S\372\250\274\315\2528\275\304Y\330<\310\006\030\275\013\034\362;8\006j<\363^f<\265\035\212=`\372\221\274\342\207\004=\335\331\030\275\035\'E<\254\253\301<\270\031\262\221H\274\334\254-\274\335]\320<.\220K\273\340\360\361\274\013\002 
=\221\241G=\357\257B\275\355\244\326<_\323\177\272\305\323\215\275?F}\274MR\337\274\3726\272<\035\207\335<6X+=bO\002\275E;i\273;08\275\344\026\035\274\032\274r\274\251\260z<\300F\005\275F\337^\275\265\235\371\273\267\027\333;\003\020F=q\325\013;\351\200\037<*\\\212\271\336<\243;\352KK=|\363\303\273\013[\013;L\247ER=\036\033\374\274\311\303\335<^\345\334\r\275\302\254\003=\3550X\275\227\032\342<\205C\212\274\245N\250\274)\007\345\274\335\3456;\314\346c\274\213;\025\274\373Z\023\274P\202\314\272\217\257B=$Z<\273\'\217\232<\234\261\342\274;/\277\274\341\353N\274\007\235*\275\035\014\317\274\200\375\276<\207vR\275W\353\327;lL8\274wCz\275\222^\023<\016\367\321gB\253\274\2417m\274aj\362\274C\013!<\013\312*=\264\236B\2752U.<\031\262d\275VM\260\273\361\'\021\275L\342\021\275\273\333_=\252\1773\275M!P\275\226\272)\273\331\000\323;x\024\252\274\224\024)\274\021`7\274b\265\002\275\220\003r:\226w\225\274y2b\275\312M\023\275\036\035\276\274p\257V\275v\375R\274\356=\333;\034\215\034\274\274K\235\274\3446\003\274\205\263-\275\354\236\027\274\326\003x\274K\313\240;\234\0044<\357\020\003\274\3640\r\274\344\177\237\274\365\2001=l0W\275\343\214\227\274\374\264\227<-\357_\274\021N\330=\032\347\247\274\232|\023\273;\323\260\274\344\213[=\310\\\201<\324\241\010<\020\035\017\2756\n\013\274\252\332E\274ib\235\273<9\262\274\251\336\002=T\353\324<\027@\224\273\254\005\363<\037\304\271\273\215\245z\273\252\223\314\274\226\003*\275\251\032\215:\312*?=6s\002\274)\327\224\274\266\225\263<\376m\340<\226\036m=\260\327\000\275\003\323W=\256\'7\273|\241\206=\254M\214\274\241u\203<\335\023\035\275u\004\324;q5\314\274\211F\363\274\034>\r\275I\206W;\337;*\275\322\021\243\273\\\207[<\270\233\033\275\364\255a;k\000\252\273\375RC;\327Y\237\273)4\2159\202\353*\274\260\202\271\272\265$\270\274\360\321\037\274\245l\016=K&\364<\ri\213\274mT\'\275\322\362/:\276\021\202=W\370\027<(W\255<\246\014\013\274\320\347D\272\265\321\000\274\250\243\276\273\004\365\303<\330\204\245\273\362\350\326\274\031q?\275\303P2=~;\
327\274g\336\206\270\263\321\036\275\335o\'\274V\341\001\276\374\036;\274XI\203\275\373\033\003<\222)V\274D\033q\274f\031\312;y\313\367<\017.\377\274\247\240\272\273\220\206\342\273\343\261\027\275>\221\227\274\221\230\335<\230\025\331;\265t8\274\326\362^9\222`\247<\302\220n\273\237\342\351<\266f\316<[\"\257\273E\326\177\274\t*\021\275f?\211=\356\354!\275\033wD=f\211\347\274J\276*\274_\325\367\274\312RF\274\331%*\275\330\373\361;e\203\3728+\310W\274.\221\376\272\271Q\327\273\245\346K<~ \344\274m\344\024=-\270\005\273\004j\300\274\333$.\273G\274\013\275\201\360\202<\250|\211\273k\364F\274\202\230\201\272R\322@<\270B)\274\010)K\275`\204\002<\214\026)\275<\235\266\274\345z\345\272\'\034`\274cH\027\273\036\300\257\274.\317\202\275\177[M<\300LV;\033 a\274\324\220L<\331\361\241\273*\363\246==\247\323\300E\271\351n\255\274D\232\036\275\007\251\210\274\227}\007\275\352F\223;T$\365<,\214\313FA<\372\362W=$\332\266\274, \'\275\026\357\322;\230\003\"=\302\313\277\274\273\356\350\273\001\216\257<\372|\211=\312\257\242\274\344b_<\026m\361<)\356\010\274\253\366\301\274C\347\304\273C>\222\2737\310\331\273pDu;]\326\272\274\256U\241\275nTr\274\303\3536\275\221h\261\273j\3516<\231l\311;\332E\036;\033\016-\275_\375eg\274o\371\031\274\246\315\375\2721c\001=*\223\024\275\006\203\367:[9`\274K$\233\2756:\216<\3307\034=\212\035P\273ava<\035\372\001=\000\351\270<\352\003J;\'7.\273\356\216$<=\212R<\200\245~\273\243\006m<\001\337!\2726\217\017\274g\023\226;R\031\205\274\330\323\213\275\361%\000\275w\214\\\275I\335\343<\005Le=D\364e\274\232\215\r\275\367\364\024=\337\305\251\274\232\303\313<\023\001\206\275\310\372\307\274\220\273.<=X\242;\231\026\273<\306:*\275\334\224\240\274\213\267\017\274\363f&\274y\374\021\275\371\220\'=\000+\260\274\034\014\346\274{\277%\275\374\006A\274\316\246\033\274L\326\033=\301\032\321=+Z\200;~\201\\<\213|\231<\360\207\235;\330\276N\275B\255\365;\370 
$\274\262\346\256<\220\203\357\271\245\"\276<\370\365\001;\337~7\275\266<\240;\203\340\202\275\365h\231\270.\301V;\370\352\3338\250m\343\273|F\360:Ph\027=\272\2322=vE\323\241\255\274\223\r4\274\275y\"\275\272\025\"=\334E\264\2749\243 \275\310$7\275\003f\223\274\375\234\322\274B8\210\274\213q\223\274\"\275 \275\231\311\242\274\366=l\2745\307\n\275\376\014\210\274\217\265p\274\224\257&:\032\330\324\274U>i\274\370\2325=\206N=<\340\226n\274\005\2524=\022\354\320<\323\341{\274\177\001+=a\226\334\274fj\020\275jB/=\265\033\005Q\273\'`\345\2741\300\221\275\262!\357:\220\013B:\333Q\257\274\257\356\001=\014\366t\274*\\\370\274\032\034\257;\216l|\274S&\243\274\356/S<.\314\016<\007\261\341\273\025K\270<\260\232\206\272\341\304\256\272\234\250\226\274S\027\202\273\350_\002=qT\363\273\265\003\240<\"\024f<\226p\231\274\306\0347\274U\360\017\273\310\343\344OO;\211;\342\274(\242)\275\0225\225<\200\212\231<\220T\'\274.yK\274\0264]\2755\252\327\274\316\321\035\275\312b\2159\207-\217<)5\377;\220\377\223\274\267\204)<\357\211g\275\275\034\030\274\034\252\002=5\336\230=i\366Q;\003\307\260\275(\272\314\274\247x\033=\201\217\342<\376\221%\275\003As<\245\371\026\274\367J\201\275\266\346y;\335\005\252<\325\204-=\214\245 <\272H\314\274\024\372\202\270B\350\363\200\274\001\324\032;E#\216<.\244\275;\371\245M\274\215\243N<\275)6\274E1O=\\\021\035<\202~\261\267C\340\360\273\300\372\322;.\306\2269\024\334\212\2749\'\232\274\211\352E\274r]\234\273\317\245\265<\203\327\017\274\\7\022=D\264\007<\245\335\010=\264X\361\274\214,\355;\264j\013\340\200<7R#=\2560m<\2352R\275[7\022\275\237\017\350;;\230\311\274\0362 
=]\201\004\275\301m\027=\311\277\234\273!\256U<-\350\000=\013%L\275Ih:<\310\222\237\274\242\366\220;\320\201\360<\372\372-=\306E\'<0D9=\204S}\274n[5=o\253\024\274D\326\002;\360\234\243;\335\335\221<\241\257\252<\312#h\272\023\206\020\274qn\370\274\013\367\251<\215\371u<\362\320\214\274\024\261\361\274\376\222\263\274\262\225\321:\"o\001\275\365,\236=h\343\245\274\327\237\026\275\366s\222<\254{\363\274J~\364\275\376\341k<\317\342\376;\322\377o\375\303\273s\311G\274\324C\331;\210KQ\273\335\3447\273\356\243J<33\247\274%d\244b\275\222\\j<\344fk\273\217\355]\274\260\215\223\272|9\342<\021\231\352;\331<\231\274Q\324\212s\366\360:\350j4\274\217\036/b\274\035\261\217\272\224\261-\274\\==<\332Uu<\201\275\372;\206\312\307<{\307\037\273\354\227\246;\024\221l\274\302\272\214=\027\035\000\275\223\333:<\376iB\275\367\256\213;+\337-\275)]\263\273P\241\217\274(\260\313<\240\256\337\271T#\275\273\354\030\200-9Y\214=\005\322\006\275\362\264>=\274\225\t=n\020\261\215@<\004=\247\274\260\">;\324\267A\275\2364\314\274D\233Z\274\356\027s\274\237%m\2759ml\2750\226\211\274\233\304\031\274w\3036\274#\244\013:w\026\251\273o;.=I\206j\274A\210\323<\320{\213<\007\322Wq\033\275yS\317<\275\032\214\274\355\356\201<\205o\213\274\214\005\243;\252\001\231\272\027q\325;A)\365<\035K%=\227P\025=\270\212\365<\200q\r=o\236d=\322\366\314\275\306ev\274Q\014\026\274k9\205\2742\250\326\274>\252\000=;]w<\216\223\235<\025\352\216\206\322\274R\323\263;v\2352\275\206\236m\2758\030c=\276\367\263<\226\304\217\2734\334\244\274\346\325\034\275s\3375\274a\357>=\206\270\212\017\221\274UH\306\273\245\352p<\032\024\226<\004\277\274;gxT\275\267%$<\204\031\032<\251\"u<#\335v\273\006WA<;q>\275\027\026\337\273\217\263\031\275\337\307+\275ogl:\336\0203;\313\232\367;}\r\036\274-\211\217\273\271\245u=\021\262c\275*\372\344<\027\301\351\273SH\207\274\003\356\247\273)\252\253\272\033\361\323;=[,\274\240\337\246\274te\264<\226a\255u\273\377CC\274\320\216\273:\002\315\235<\322\322\200\274\261\256n<\321\273\032=\223\322\'\275\371\326
\242\274\000\214\270\273BI\246<\325\256\235\274\221\2008\274ep\266\274\022\005\013=;\357\276\273\231s\322\274\254\360\236:\0336\361<@\227\t\275\\\330\031\274!\347\334;\265L\017\275\212~\016=\315%L\275\017s\317<\024\307#<\363\364\317\274\374J0=\362\271\002\275\333\n\334<{\235o=6\273%\274F\231(<\261\345\037=\206\232\250\274N(\226\274>\254\027=\0060\n\275TM)\2745\006\031\275{\356\023\275\371K&;\"\220\377:\205U\240\272o?\320\273\212\321\315\274%!\203;\307*\311\274\211\323\242\273\315\307{\273\260;R\226\264\274\343j\234\274\224\355\220\274\304\202\031\2753Z\237<\035\251\016\271\277\247\253;\210\253\033\275\277eN\274$\266\262\2739\207\325\274\244u\263\273\311\034\341\023\273\360\r\230\274\254\315<=\004\264m\275\026\354\243\274\306dd\274\001\310\304\274\214\377\221;\210\223\257\273\003\\\001\274LT\"\275\324f\037\274\3777\316;H\222\003\275{\256\224\275SKpH\274\271\240L\341;\223\217\237=&:\021=\322\2021;\236Xw\275\343\017\262:\256\310\225\273\035\314.\275\275\253\017\274\234p\024=\364E\225\273\211.{<.\261]9\232\034\2136\274\362\375\361\275\317\021b<3*\025;H~\320\224\026f<\030\324\331\274\222\025E=\024N?\275D]\201=\013\355\220\274\345\216j=Uz\002\274\366(\013\275R\\\371;\253\230D=K\265\216<\307Q\000=i\277\315\273xI\337;\255D\033;\243\361d<\237_6\274\333\336\351\274\372.\365\272\342\222\260;\"\304\277\273J\357m\275W\333\234<\033\332W<\374\332\223\273\222y3<\177Y\240\2738DU\274\256\257\025\273(\2663\274\323^E\273{\036\352<`\341L<\361%\324<\2354\277\274p0\210=\356\367\337\274\243\324\026\275\370\362\213\274\031f\207<:\204\325;\020\243\343\274\355\265?\274\230\330\313<\270\351\024\010\2758.\377\273\t\024\214\275\205\373%\275\223\300\244\274\271g\232<\365\227\251\273\010\275\203\274\031I5=\326C\213\274n\205\301\274\341\372W;\265\332X\274\316\320]=\017\233\233\274x\267h\274\231\n\267\274?:A\301\335\273\244\276\234\274\320\265\343<\2003=\274\000\354u\274\225\273#=D\357\274Y\314<\207.\236<\315\'\353\27217\370\274Go,\274\215\0044\275\360\222\001=0w\265\274\275\333\273;\275o\'\275\'\03
0\241\274Y\252\025\275R\350\025\275\2746\235\274\316X\261\274)\034\007\275#B%\275\016\306\274\274-\361\201\n=J\326\323;\301hB=\342\240\233;3L\017\275\322\000*\275\223>l\274\302\262+=\330\324&\273\206\333\032\275\276\004D<\027\233\316<7\355\244\274h\361\264\274\257\237B\274\021\247w\273S\002\203=\300\340#;\322g|\274+|<=\340\227\205<\276\001\002\275\000\r\261\274Z\272\320<\273J\212;\335\361\276\300\021\323<\215\311\214\274g\022\001\274M\214\244\272\300_\237\273\235T\2564\014\274T;\030<\223X\323\273\030~\312;\217&P=]\000A:n\243\214\274qF\235;g\223\321\274F\354\216<|\277\316\274\005w\254\274\360\320\016=\201\304\234\254.<\\p\356<\345\222q\274^\2664=\013F\375<\315s5\274\235\031\333<\324o5<\360{\272\274.(%=&\364\355\272b\312E<\245#\017<\211\246\345;\341\207K<\356\212=\274EL\202<\371G_<\203\273\'=\260\365T\036\275``<\'\306\273\r\252\236\273n\231s\274\207\3067\275\263;K\274\270\030\272\273\244b\330\274\021\325\222=\017X(\275?\305\201\275\035\372:;\353\245T<\276\341&<\366\"Z\275\345\\3\275 \000\272\273\003\020\302\272\0351l=\227\267\354;P\017\032=*\0259\274\275A\021<\'~%\275\203N\266\274i6\330<\356$\361;5\365\237=u\247\007\274\237\223\323<\374\267c\275\203g\264\2739\245\342\273\312A\266\035\261\274\220d\003\2743\203#\274\203L\366\274\\\3121\275\016\025\243<\366\024\363\274\347\t\001=1\362+=u\273*;\202\240\002\275\322\014\231\274\3514\004\271\333\206\363;\305>6;\215\204\220Z)\274u\2753=\276\333\202\274H\356\376\274\331\354\256\274)\246\017=h\300C\000[\275BTf\275\315\357c<\321\201\'\275\313\0337\275\210\311\177\274x\220|\274Z<\324\274\317~x<\014\350+\275@\013\360<\347\321\230\274\303\226\351\274\272\203\014<\361\215\017=y\324!\274\361y\322P\274V\374\342<\311,\034\274n=\360<\3525\032\275\210\366\036\274}\350:\274\031\277\r\275\246\022\315<\375\031^<\021Ge<\361V\346\273\tN\004<\313\010N\230\353\274Fe\n\274\3167\266=r\366\364<\021\350\255\274\001\023\372<\r\264\366\274\335b\231<\037\237\027\275E\266=<\331v\016\275_\003\022\274\364\033\"=+gC\274 
\224\n=\317n\354\273\213a\264<\334\017\200=a\334x=J\332\300\274\343k\315\274\377\271a\2742\216\336\274\374\036\010<\030u+\273\204nq<:\2423\275eM\021\274\n\252\242;\253\310\t\275k\025\257\274E\027\363\273\203\"\337<\370\261\325\274\313\307&;$\217A\275\250DG=\300\014\206<\241\254\342\274\246)\317<\"\037\342<\031J)\275\000\217\233<\230M\266\274\032\233\220\275\217\335\257<\342\346\247\274n\232\204<\342\364\256<\232\235\211<\362\2356=\2718T=[LV\274\353\363\024<\331\270\314<0\343\n;\342\242\255<1>\244;P\342\302\272\355\220\327<\'\331U\273\216\006\304\271$tE\275/\210!\274\251\373\016=\257\317\207<9v\321\203\274\220p\026\275\212-%\275\222Z\000;\"\267=\273nH\331\273V@\271<\022\002\304\274N\213\220\274\207L\237<\232;C\274\035M\377\274\332^\260\274\321f\234<{\350\342<\205{3\275\372*w=\242\357^<\235\273\324<\200\336~;2&\200:,(~\273+\341\356\273\024+\0229\204\230\254\272\373\234\367\273\357C\377:\2653D\274\221f6<@8w\271/\373\345\274\310/\227\274\267\005\326\274\320P\257<\354\252a=]\331\332<\310\310\267\274VqG\274\205%!;D\021\320\274\352g\325\274\241E_;\006\r*\275\332\364\022\2751\025g<\251>v\274\217\205\036<\033\003d\274I\261^;\261\226\222\273^\343\306\363\271X\024\261=So\010\275\035\346\322\274\342\301\001<\233\244\230\274\r|\372\273\301\363\355\274\213\225\226\275\370O\035=q6\003\275C\354s\275\323\216\312\274\361\320q<\245\370(\274D\276\210\273\221\030\233\274)b\344\274\245\2444\275\270\254\344\274\20100\272<\244H\2749*\352=\246\030\346\273M\333\022\273\020\351\325<\246z\347<8&\330\273\241(\251;#=\227\322\002=\362\263\224\274\300\020\004\274\330t\316;\277\220b\274_\004\264\273j\034\256\236X\275\221.\035\274\3127\027\274A\215\273:\320\273%\274cuz\275\372\321\346\274\006\177\273\274\227UV\275\253\r\000\275\303\340!=\367\215\275<]\022\241;:\364\237\274\234zF\274\243Q\302:\275\315g<2\250\376;\257\032\317\273\236\333:\274\"\027\023<\336`\021<\317X\326<\247+\210=\020\221\036=\013\301\341\274\321\022F\273L\377\2659D\264\221\273T\305\243\274\251!\277\274\307-O;\301k\203\274L\273\305\
272\335\224O=\255\264\222<\376h\253\274\013\376\325<{=P\361\263<\257\225\316<\247\377\t<\234\363\261\275\215\315\266;\242&c\275\377\305<=\322\251\230\272\315_k\273wI\356\274\243\353p<\342\207\246\273\310f\316\273\335\332\250=(\014`\273\373\367\261;\010H\244\252<\307\203\233\274\253\266\202\273k\262\252\275q\215\"\274\360+\204<\235\023\013\274\026\177\253\273H;jL)\217\202\274\027fJ\274\335\005T<\200J\034;\342$g\2759\023\233<\341~\255\273~\026\205\274\226\302\323<\275,\376\274&\315\010=\227\0339\275\362\275\200=s\237u<\342\\\225\275\211A/\275\344\\6\274\370\366\237;\023\351\025\274\264\335v\274\034\335?\274\370]\006\274+r8;\204`\021<\365\217m\274h\242\007;\241\344\375\274\266x\331;\236H\260\274\233\3411\274\022\036(=\202\267\313\274S\256y\275\272\220\r\274Be\227;f\275\225<\037(\005\273~V~\274\264\237A\272\244$\r\274\243_x\274\360)\2327=\360\254r<.\245\240\274\353`l=h\272\323f\340\274\377O\301\273\001\262\204\273\253\375\003:\022\"yst\273\017\031\032=l)\363\274\n\364\224\274\"\236\370<\232\210\360\272ga\354\27313\'\274\211\312\\<\225I\277=\247\017j=\336\0144;X\213]\274\314F\035\274==\372\274\216\326\';7\243\357<\360\372\255;\006\317\026=2A\314;\234\001\307\274g\177\365\274\340.y=?7\200\274\253\005\"\273H\"\322;z*F\275\313\030`\274\204T7=DS\235\274Uy\243\274\2025\200<\256[\023;b\'4\274l\355M\344&<\3239\232<\201\304\357\273vw\340<\'Z:=\252\202y;l\033\"=\341\254\220<\031X*\274,\004?=\027\003\000=4G\362\274\301\362Q=o\366\203\274\207e\'\274R[Q=5\027\255;\223\007j\274\341.\000\275\261\334\351\274\034\r\350<\014R\263;\364\304\362:\210b\021\275/\036\210\274\003\330\335;!|\271\273\277\'5<{\324\264;\007l\236\273\263\317\241;\313\312\375<\272\3203:\260\017\r=L\346\274;\217f\030\274s]\2029\314\305(<\300\317\300\274y\224\321\273&\206\214\274X\204\177;WZ\257;\007\376\335<\331z\240;P\350A\274\332\302\231\273\214\002\3003<\202\276\307\273\267\0322\275\366\201\n<\251\200\320\274\330\246\237\275\2636\022\275iC\014\274H\274(\274a;Y:\366\376\216\274\3661\232\273\226\263\320;\325J\250\274
%f\276\273\313rS=\327\206G<\303}=\275f\010\330<[~\035\275\237\241\241\272\330\233A<\244N\243\274(D\322<8y\357<\356\t\344<\372\177\246\274\267bm=\366\203gns=\365v\031<\236\036\241<\035\236T<\241\207\270\274VBc\274\330)\304<\205\363\313\273E\213\250;\302{\220\274v\340L\275pZZ\275y\2516\366n\274\367l\025\2750F&<\211\331\n;3\020\321\2745^\016=\272c\3039K\263\335\274\255\260\\\275\367\r\230\274\342\202\034\275\274\036E\274\210\364H\274\225\033S<\375\r\002\274n\322\215\274\003]\372<\212H\021\275\251\360\226<\014\035\350;\000\346\320<\340`\313\2748\213\346<*\324\334\273Y\365j<\373\312\005\274\247}\016\275k\345\030=\020[\032\275T\017\277\274!p;;\227\'\333;d\3355\273\033\203\335:\254\364A=a\363\237\274\277\306\207:Q~\t\275\343\230\322\2745\237\030\273/;\246\275\250\255\302<\230Y\254\275\'i3< \n\255\274\315\347V=\312C\257\274\223\321Z\275\243\205\257\2743\351\220\274~e\230\274\323\257K\274\375)\335\273\374\020\250<\311\265\003<\226\375\243\273\217Y\364\274I\013\005\274\022E\205\274\031q&\2721\2625\274\005\016\215\274\240k\241<\3774\207\274\343Q\265\274\355\231\006\275\256\367\225\274\034\260\267;G:+\274X\330\316;\023\314:<\t\035\246\271\214\351\342\274\330\243\243;M\267\263\273\330\0010\275J\247\263;&\270\005\274!\206\237<\204\367\340:\256\335\335\273J\2437\275\027\253}<\231w\236<+LG=\016\251)\274\022\216R<\356\2541\275\272\257\377\273\006\230\373\273\033\215{=A\244w=\276\0171\274\334\017\002=\033\013\257<\317l.\274PN3\275\336m\2618\315\0325;\300\014R=\2341\317<\301\366\002<\364^\177=\216)\310\274\025\007\354;8>\335\275\033C\333\274Wn\277<;L\014=\245\207\311\274\236\200\014\275!\363\323\274]\341\356<\334\034\241\274\357\365\271;\344s\357\274\240a\251<\221\370\000=\020\376\021<\303\346\231\275Os\265<\324\220)\275t7\001=\210\247o;\257L\362<\321\346\206\274)A;\275\306\302t=\004\020\342\274\363\031\356;\324NU\275*\213\n\275\257\360\025<\237\210o<\224\320\256\275NGR\274\271\357\033\273\377^\201\274\261\365$:Qc\365\272\262\024\235\273.y\227\273\006\334\241\273(\'\212<\345\010\241<\
033\304.\275y\257\t<[>\322\2737\375j\274\021\334(\275\032O\006=\007\343\r=\322\201\257\256$\275\373\2657\273\365\350&=\371\376o<\300\324j;\026t\261<\2727\327;/\036\002<.+\277;\251\276\310;\342O\037\273\271\356\234\271\365+\237\273\340\3233=\004\306#\275\357\202\350;\375\003?;\n\257\333\273\263\233\204\274e|\251\2733\276!\271\374^/\275gP\231\274\251\307\246\274.\361q\274,\013\014\275\360Dq\314\"<\360}\026\275\205Z\316\273\334\270\247\275\356\365\313\273\036\274\304W~<\027\376:;\260m\262<\035\232\204\274\310I\230\274\204\341\360;\336\313\274\273]\251n\275\257 o<\013\353~<\007\267\030\275\227\262\002\274\321\305\373\274R\305\300\274`\3578\274\224,,\272\261e\326<\220\2036<\202+@\274\206B\247<\304\t}<^\263\371<\3622\026\275\004\203\265;\312\311\027=\"L\2022\275\216\225,<\335\343.\275=C2\273L\222\r\275^D\372\273z\374\355\273\223\327\003n\\\274\274Z\273\367<\262\016\334\272\255\3256=\331\2220=\032\300\310\274\310X\206\275\234pA\275\234\340R\275g;o\273\204\006\253\273\2756\230\275]\377\\\274.}\261\2746\324K=3\206\342\274\014\332\211\273\335\023\277\274\360\310\203<\260{8\272N\376%\342\016\274F\301\340\274z\006$\275\240\354\261\274\212N+<\276\037+;\265V\354;\366\002L<\315\026\013<\246MI<\320Ef=\035\244\032=#\237\371:\263\310M=\242o\014\274=\244\211<_\221\210\274\232\340\233< 
\014E\274\254\331\337\267\273\275\325\022\275\345g\035\275g\347\200\274\365\330\006=\312\375\001\275\276\272z<\'\217\027\272M\177\315;\314Y\227\275\326\273\213;\322wI\2758[\177:\277\375\035<#\0340:\326\333\224\274\241\265m\274@<5<^\342\234<\024I\000\275\322\252\223<\350\271\274\273\031\217(\272\312\217\237\274\372\330\024<\264+\344;0\257\224\274\367\2565<\346\345\024;\n1\206\274\'\177\206\274\001\223\263<\220|\003=s\002O\274\234\342\001\275\221\251J<\323\325>;\363\373P=\022\322T\274\020N\216:\307t\004\274\321\265!\274\255\347\340<\240\250\347\274\313\311@=\317\002\301<\350\205\303\274\2750\363<\330\260T\275\271%h;\2325\350\270\242[\n=w\231\351\274\225k\355\273XQw\275\320\n!\274\266\213\026\275\227oE\273_\n^\275R\333N=B\365\333:\340bZ\274m\341Y\275>Yb\274B\363\226\274\002\266\026\272\024h9\274\266\2424;^\351\205\273\177\346t\275\3135\214\275\206\246\304;\325-\322<\245\010\202\274\221\007\014=g\347\177\275\212\037\225;]#}\273m\333o\273,\343S=x\016\337\273\232\326\351\274\355N\200;`\332\275\273x\013#\274\270\350\224\274\372\251\337;\022\376\024\273@\340\350\2738\034\300;A\026\022\273?\241\217\274Wf\263\310\300\274\262\010\t\274?\302\340\274\327\327\325\274\233_\220;\033\250\276\274\r\230\343\274g\303X=`h\235\274\316\215>=\256;#\271F5\177<\373m\211\274R|G\275^K29\2455\235\002\274[jH\356\365<\300\r\031\275\276\351\253\274;\250\006=\210\r\367\273\\P\336\274\322\237}\273\246\353\244\274S\371\t<\213\003\251\274\311\365\020\275\245\021\301;\315Z\305\274\266|;\274\031z\237<\211Z\226\002\274/\337\207\2732\243Z\275\035\262\362\274wO\177\273\347\246\3409\363^\321;\327\245\035\274\272(\021\275\'\306tw<@\r\003;\336\204\'\275U\r\306<\240\310\277\274\007\243\267\272\006]\365\273I\022\032\273\017\300\242\274z\227eq8\275\260\345\242<\222\277\247\274/9\376\261\273\010MI\275\0234\210\204\231\203<\352Tq<\017\275\022<\rT$\273o*\030=\022\331\210<\264\001\375\272<*8\275=\271L\274\001j!;\307\276\257<\303K\301<\312K\240\274\201\242\235<\177\003\202<\242\213m;\3218i<\277\367\023;k-\276\273\232
\230!=\317R\001\274#\316}<\023Ff\275.\276\022<\177\361\213\347x\271\204\023\017=\200\240\027<7Pl\274\226\026\2104\274\244,\314;\265\340\036\274\236\241]:bX\320\274\241.\213\273\024\254J\274\006ch;[\205\245\273\014\023\322\273\372Z};C-+<\2034\233\273|\355\260\003\'\274\005\220T\275\320_|<\267X\305\274&`\320\273\017E\335<\207\037!\273\206\261\260\273\026\241\242\273\202\177K<9\376\224<\265j\213;\227\300\253;\010\207\207\274}\237\2469c\223\243\273\274\355G\275\034d\237;v9\213\274^\2221<\3437\220\274nOP=\255m\231\274Xe\206=\3639\273<\013\222\305\274\002?\335\272N\210\363\274.J\264<\347 \337\274\002\266&=\365I\002U\024<~!V\274\247\200\036\275\020\r\000\275B\021\235<\177\334\035\275\030EX\273\357\024R<\013\272\353<\316\301\231\274f\354c;\357tT\274\243[y<\244\240{\271\234\367p=\n\230\346;\246y><\317\222%\275\344\216:=}r\264<=N4\010={K\233\273Q\027\273\274+OU=\363:\272\312;\230\351[\274P\230\326\274\256d\177\274\t\231H\275\222\275\267S2=\003\021\266;\033\237\005\273\2460];$$\215\2746\326\n\274\372\"\370<\311\006K\2721%\234<.\262\013\274\332\004\030=\022=\201\274a7\227\274$\374(\275/&\030;\375\212\237<&\247\361:\345\264R\274\242\271\"\274\316\301\233\274\n\270\260=\241xo<\217\270\306\2746p\021<\324\363\352<3\007\255<\335>U\273\031A\212<\347\354x\274!\032\220\271\031-\004;\323\304\331<\252<\242<\217\016\305;\001[8<\274\356\264<62\320\350(\274\333X\000\275w\243\377=j\347\233\274\236N\273<\317\330\364;\314\317>=\035\215\277\274(u\365\273~\211\264<\254\370\235;Ca\0020\250\273\2013\222\274\337\307\355;\350n\240<\373>\201\274\206\\\037\274\204\037R<\270\002\253\274\251z\316;6\253&=\346AF\274\001\303\304;?@\230:\360\032\350\274\200E\200<\231\260\276\275H\342\250\273\337\322\312;\302\315\225\274\305\221Q;\331\306\246\274\364\374\221\274\246\320\220\274\232|S\275.\307\3637\032/\001= 
\036\222\272\211fZ\274\020\\I;eT\314\274\354\031@<\343\240\021=\3609^\275\205\204\336<\306\035\230\274\277,\t\361\215\2748#s=\006TY<\257\033\004<=W\265\273\315%{\273\"\226%<\035\236\312<\307\034\214:M~\017<\373,\273<\326zh\273Gr\253<\234\030d\274\246\026M<\254W\t\274\020L\237<\330+\367<\2238\022;\3156\313\274\034\312N;\3057\224<<\373\215<\254\323\340\274\211\214\313\273Xz\205\274U(\334\274+\321\325\273vZ\005\273\336(v\274\212^\333\272amQ\275s\255\261\273\214\340#\274.\237\351\274\210\216E\275\271\325\340<\211TE\273F\017\355\274\3603\205\273\321\025(={w\205\274>\353\251\273\030M\203;\274(p\274\034(B<\370\206\307;0bO\274\306\317P\274\2461O<\314\305\271;\200%\340\274\234\016\027:\037\265\n\275\257\3167\273A\020M=\374j\244\273\373\215\323\274S\335\306<\262\303t\274\343m\264<\202\010\303\273\202\024\331\274\201j\300\273\367\246\354\274):_ _\274\r\nm\274\351\336\335<\364T\312<\235\252\335\273\257\302_;\270\365\016\275\265F\353\272\376\300\3239\024\277=<1<\225\274\370\030\377\273&$\017\275du\306\273*\257r\274\330x\220\274\365\242\207\272;=\223<\360\335\221<\201\240\005\273\264\203\335\272\247*D\274*f\216\273\303_;=\255\024\025;a$L<\213\251\232=\025J\215\274\374$\017;\310\274\230;\255\307\210<{\233\252=\3569\305\274\375\010E<\331p\322\273E\317\235;z\365\235\274I\236\027=\263\221j\273g\214\242\274J\301\345\274\340\253\200\272T|\313\273%\263)\274\214cO=\006*r\275\006\260\270\273\272\352;\337;\203\\\255\274_\025\337;\017e\267<\034R)\275q\005\207\274\241[\004\350|\273\222DG\275\204\247\335v\215<\035\256\003\275\361r\002\2743I\211;\353/\016\274E\001};\232P\343\273A\313\234\275\353\250\026<\001\004\001\275\357\'\212<\334\016C\220\274)\315\253\274d\316\230;\023\263p;\257l6<8\223\331\273:\226\033\274<9\271\274\236\322\373\273\322\001\252<\321\262c\275\361\254\333\274\230\240\317\274\360\331\250\274\302\202\212\274\350f\345\274S\212\310:(U\221=tu\322\272\002\211\340\274\326\270T<\363cD\275\014%\241\320Z\2750\346~:c\027o<\252\035/=\373J\027\273\272*T<\240r&=\021}c\273\235<\013\275\37
6%O=\031W\242\274\360jZ<\321\212\275\273\234\213(\275\352\330g\274\023\247\022\275*}\273\272\207\214\337\273\335\323\212\274Q\302\274;e\341/=\032^\302\274\232V\005\275\370\317\313\2737\205\301\272:\222\305\275\342\301\271\273C\346\301\274u\301\253\274L\347\272\274\0170\342;\276\230\321<\322p\230;u\232\316\274qY\037=\r\370\316\273\261\333y\274\253ZW<\276\264\375<A\274s\246=\273\031#\204\274\013H\360\274b9\225\274$\254\374:h|Q\275\317!};\230\211n\2744\022m\274|\344<<\003\025J;\375u\327:\356\247E=\327o\254\274\021\tR\274\372\t\376\274\2151\240\273@\362\205\273\316\206\204\274f\036\342<\353\206\254\275x=\364\273\356\271\215\273\000\352<<\364l.;\035\340\200<\273\262-=\263t\321\274\3705\341\277\274;\317\"\355g\274\271\001\243\274\001\tN\273\253H\242;L\307\200:E\325N\273\254\213\007\275\370\017.\275\322\343\017<,L\267\273\377I\252\274r\021\200\273yZ\262<\244\227\205<\336O\240<\224m\377\273CC\363\2748(\232\274\n\035==i\022W\271\362<\307\274\314PT<$\324\236<\304\375T\273\342u0\275wy\244QF\274\3570\362<\322\365\014\275\237\005\026\274\277\246\355\274o\345\217:\226\033\271<\323\004\226<\307\021\267\272\307G\342\273#K\256<9\023\272<\251\330\035<\243}$=\2248\313\274\306Z\204;\337\202\017\274s_\255\314\274)\304\333\274u\204\"=\276\031\215\275\307\026\314\274\027\325H<\205W\023<$0c<5\233\217<\354\033\217;\315\216d\275\177\246\331;g)\212<\277\275\210<\260\261\265\274\005\344p\274\307\347[=c\225\215=\274\242\264;\000c\274\274z\204\230\274?k\334\275s\204\001\275}m\254\274\207w\251<\261y\026\275\031\326\177\274o\237\001\275\342[/;\375p\324;\217cJ\274v)\226\274\225\214\227\273\025<\247\274\360\373\006=\351\033\201\274:g\230=\207\\:=\362\310\306\273\270QF=\303\037\210==\361p\274E\213j\274+{`\272\030\024\001<\020\350G\274\005\232;\274\033J\023=\330\352\336<\031\177\n\275\n\312\360;\314\334v<\016\305\224\274\367\303\030\274\335\351\031\275\255D\315\272\030\031[\275\016\306\003<\300\034\003\275\343\361\2459\366\356\235\342\010\274\3730\250\273H\317\017;\217n\353<\344Fu<\276\034\244<%=\2259
\254\2035\2744W\276;\367\303b<4\211a\271h\364\322<\\\031\262:\202\375f;\274\244\212\274d\265\375<\2745\363\273\357w\356<,\235\2639\315\372\300\274\362\373\320<\025\262\024;\212\275\3423\244\273^a\310\272\372Z\377\274b\245\n= \254\023=\341\213H=H\325F\271\2449\374;\346h\200=\271\310\355\273M\225\000<\242\270u\274\237\325E=\212\367C\2753w\255<\374<\315\272j^\r\272\200\321 =\325p\356<\0071\232;\253\374\t;K\033\024\275\234\346u\274\371\204\013\273\243(\353\273\327@\331:\334|P=\001\264\324\274\363h\344\274\325\201m\274\354Q/\272\235\023\323<\213\255\030;\026\201m\274\320\035}<\213\334\224\2737ym\274\030\331\016\273\234\037_a<\365\373\353;\206\303O\275S\363\357<\227\221L<9\335\271;x\310C\274\351~\256\275w \372;\334\327\0107\271\351p\274\332\016\247\273\243,X=v\351j;\2575\342:\214\277\000\273r\022\326\267\316@\257\274\001B\371;+\346\014\274\246\234\036\275\002$-\274\316\340\205<\016\264!\275|<\020;\214\324\236;\330\315\310\274\321\257\327<0\014)<\215\005\223\273\233\3626\2748><\274\300\236\233\274+#)\274\026(^;\021e\377<\023\0011\275\213\225G<\003T\261\274\300\303v\275J-\271<\363\014\030\275\370/\256<\243\305`=\250\351\314;t{\327\272M7<\275eZ\031\275\224G\270\274\334\303b=\233\276p<\"8\305\273E\227\030\274Q\024\257\006\302\273\220\236\037=\2129\302\273~\345e\274Dd\002\275m\232<\275\014o\021\273(\304\224;\340\\\317\274\364\375\327\207\274)\357^\274\317\242\203\274|\243t\331;\312M\017<\206\020\254\274f\211\214\274\200X\374\273(\356\303\273\242\020\333;\316\010\260\273\314\252\242;y\303\205\273V\337\242\274=\273\306\272\220%)\272\3308\2318\352\243\346\274\212\341m\274\361,\252\273M\006\227\2732U\223=\207\353\314\273\355P\361\274N0\214\274\205\227\013\275\276MB;\360\253\216\274\266\036\361\273\235\276\377\273\036\346\013=bw@\274\340&*<\362d\020<\213\311\225\274\306\364\314<\037U\341\274\013\355\315\273{\247\034=\t\325#\274QB\356\273" - } - } - } -} -node { - name: "conv3/weights/read" - op: "Identity" - input: "conv3/weights" - attr { - key: "T" - value { - type: DT_FLOAT - } 
- } - attr { - key: "_class" - value { - list { - s: "loc:@conv3/weights" - } - } - } -} -node { - name: "conv3/Conv2D" - op: "Conv2D" - input: "norm2" - input: "conv3/weights/read" - attr { - key: "T" - value { - type: DT_FLOAT - } - } - attr { - key: "data_format" - value { - s: "NHWC" - } - } - attr { - key: "padding" - value { - s: "SAME" - } - } - attr { - key: "strides" - value { - list { - i: 1 - i: 1 - i: 1 - i: 1 - } - } - } - attr { - key: "use_cudnn_on_gpu" - value { - b: true - } - } -} -node { - name: "conv3/biases" - op: "Const" - attr { - key: "dtype" - value { - type: DT_FLOAT - } - } - attr { - key: "value" - value { - tensor { - dtype: DT_FLOAT - tensor_shape { - dim { - size: 64 - } - } - tensor_content: "\2576\366\272!\270\025\273\273\321\252\273}\355\205;\344h\307:\0055_\273\267\363\306\272\217\221\"\273o\005,\273\261e?;\360\340.:0\001\032;w\341\004\004N\2732\333\226\273\336j\0229\250V\217\272\'\203\022\2735*\222:\242\\\267\272n\344J:\271C\035\272*\276g\272\322\300\313:\271\376\224\273" - } - } - } -} -node { - name: "conv3/biases/read" - op: "Identity" - input: "conv3/biases" - attr { - key: "T" - value { - type: DT_FLOAT - } - } - attr { - key: "_class" - value { - list { - s: "loc:@conv3/biases" - } - } - } -} -node { - name: "conv3/BiasAdd" - op: "BiasAdd" - input: "conv3/Conv2D" - input: "conv3/biases/read" - attr { - key: "T" - value { - type: DT_FLOAT - } - } - attr { - key: "data_format" - value { - s: "NHWC" - } - } -} -node { - name: "conv3/conv3" - op: "Relu" - input: "conv3/BiasAdd" - attr { - key: "T" - value { - type: DT_FLOAT - } - } -} -node { - name: "pool3" - op: "AvgPool" - input: "conv3/conv3" - attr { - key: "T" - value { - type: DT_FLOAT - } - } - attr { - key: "data_format" - value { - s: "NHWC" - } - } - attr { - key: "ksize" - value { - list { - i: 1 - i: 3 - i: 3 - i: 1 - } - } - } - attr { - key: "padding" - value { - s: "SAME" - } - } - attr { - key: "strides" - value { - list { - i: 1 - i: 2 - i: 2 - i: 1 - } - } - 
} -} -node { - name: "ip1/Reshape/shape" - op: "Const" - attr { - key: "dtype" - value { - type: DT_INT32 - } - } - attr { - key: "value" - value { - tensor { - dtype: DT_INT32 - tensor_shape { - dim { - size: 2 - } - } - tensor_content: "\377\377\377\377\000\004\000\000" - } - } - } -} -node { - name: "ip1/Reshape" - op: "Reshape" - input: "pool3" - input: "ip1/Reshape/shape" - attr { - key: "T" - value { - type: DT_FLOAT - } - } - attr { - key: "Tshape" - value { - type: DT_INT32 - } - } -} -node { - name: "ip1/weights" - op: "Const" - attr { - key: "dtype" - value { - type: DT_FLOAT - } - } - attr { - key: "value" - value { - tensor { - dtype: DT_FLOAT - tensor_shape { - dim { - size: 1024 - } - dim { - size: 10 - } - } - tensor_content: "r\331\361\272k\235\033:\301W\214\273\375\035\025<\225=#\273J\342P\273\346\260\207\272 \227\224;\205u\370\270\233j\246\272\233\201k\273\"\366;9z\345\027\273|\250\267\273]e \271\310p\345;\340_\247\272\036\242\320;G}\243\271\2439\230\271\361\035\345\272\317\363\263\273\312\177\335\273\244\316\237;\2530S:IL\002<\300\rY\273x\320\r\025G;\374\\\346:\330\303\273\273x\320\325;\343\002\221;_g\215\272\300S\355:\233\ra:{\347\244\2736_\204\273<\356\255;7\331\':\032\363`\272\371G\005\2731)3\272\2469\322\271\3755\014\272\\8\3518\362\224\353\271-\274\334:\201F\241\272\314\222\266\271\225\324 
:wT\242:\263\264\3209\234\0179\272\270`\352:\200\301\232\272y\330\036\273[_\3669H\017F:\242g)9w\337):\270\003\2738(\336\036\273\376\234\005\273\267\316\213:\026\240\t\274\340\231M<\371)\365:\312\'\027<\373\240\337\273V\377\215\273\013\0345\272pA\202;T/\037\272\265\213\211\2729`&\272\203\3435\272\377\326n\272\354yd:\206\276%:\202>k\2729\2443\272\250\027\304;]\023y;\317n\352;\016X\236\270:\302(:,)\017\273\254\017U\273\271\246\020\273\364!\274\273Y\035\207\273\233vN;#\0068\273\350\214F\273I\311\3549=a\336\272\242\200\204\272\352\314\033:\021V\220\272\374\372\334:\177\256v;U\367\3157#6*\272\203\004\";F\317\226:Y\003\311;\036\241\033:R\271\205\273\360mk\273\244\326\341\272D\355\254\2710\376Q\272\\\303@\271\006\304\274\272\204\324\222\272\3257k;+\374\2079:2\027:\222\341\230:\201\203\320\272_\327\356\271IR\235\272\343\33579\272\"\216\272)\032\241:\206\371H;\\-:\272\035_\3278j\234%\272\270\2569\272=mz\271$]\331\272\002\227\3439\007\251\361\273\333\246\'\273\274\320\271<\004\002e:5$\200\273ex\007;\246\300\345\273k@h\273\350\025?\273\377(\'\273\335\302\3149\312\202\235\272o\337\274\273u\367\372\272\002\200j:\265\214C\273\353\202[\271e\016\340\272\370~\213\273X\277\356\271\226u\n:\202\353\305:\256\006\372:\313\345`<\210\320l\272s\037\212\273p\244\177\273\" \201\273\217\373V\272m\036V:\242w\231:\340\371\221;\016\246\247;4\326\271\2711\315\234\273\236\032v\273\334\324:\273\207\000\204:\376\240\215:\216\236\234\273Y=\245<\376\343\\\273\035\252D\273\022\243\275:\360\2074\273[*7\273\024\267\335\2738{I:EU\253;\310\254\347\272\360\327\200\273\207u_\273[\232\244\272&\325\3738\023\250H\273 
\255\344:\330\301\n\271\256\336\323;\000\3010;\014\353*\272\261.\016:\317!\021\274l\353F;\221\270+;\032\315\013:\217\231\226\273\006%\244:\244~`;\322l!;\335\377\016\272a\363.\272l|\263\267\270\266\272\272\212\272/\272\264\324\013:\235p\0259\300(\0139\263\213p8\304\245A\272\217\223b\273\004\014\2349H\365q\274\272u;;YnK:*\367\251<\301\347m\272|\022S\273S[\320\272\317G\3537\272F?;JL=;\224I\025<\204S\033;\"qz\273-\320\266\273\003\330\367\272\235e_\273?\200+\273\267\206\357\271\024\232\305\2722\276\211;\354\023:9_\202\300\272\311\325\237:W\335E\271\370\365$:ug\035\273l\033X\271~\365\364\272\216\365\3569\t\321\305:N\371\014\274\260Q\242\273\365\373\020;\335\232;<\246O\232;W\301K\273\267\022\354\272\t\003_<\364\350\336\271\317\314\001\274{\014\254\272\2412H:\325\036J\273cB\225\273\315X ;Q@\351:\n\257\252\2727R\2539\337\377V:q\327z:+\"d:\253ht;<\306\222\273\"\333\231\272\313N\031\272\223l\3449\303Dl\272&\376\223:\375\033\031\273\037\000\242:\234(\323\272\315\024\331\272\002M3;||\n\273z\003-\272\3365\020\272Y\271z;2a\177;J\243\350;\262/\346\271*\217\363\273\214b\204\273\215\023s<8\332j\273\266\257\257;\323h\0149\000\031\010\271oo\207\272\251\007\027\272\267\362\021\273_\326d\271\370\204H\273\352\204\264:\234\210\003\271\324uY\273\322\3209\273\351\006\254;e\237\323;\363\265\320\272)\305\375\272\373w\340\273L^ 
\273\324.\207\273`\363K<\277\231D<$\227\021<\036\273\016\2740A\307\272\227x\252\273\207\263\222\273?z\315\272\321\3435\273\254\351\307\272\214x\021\273(X\027\273Ow\024;P\303\3779\341\213\200;dZ\037;\003\t\251:\310a\234\272\2240\'\273~\206-\273\307v\303:T\364\335\2714\234\234\272\004i\323\272\001[\206\273/\213\030\273\267\213n<\232U\205\271\251\360|:K\036_:\220i\221\272\317v*:P\377<\272\267\331\332\267?\206\3448:\352\r\272\201=\2418{\377q\272/\213\246\271\361\253\277\271|\366\203;9\022?\272@\251\260\272\371Z\00698\230\020:\262\373\335\271Te\035\272\010\306\331\271\214\347\356\272\300\n\250\273,\036\223;o\306\370\273\2441\335;:\203$\273\277\347\331\272\270A[;X\362\224;\324\3374:\2274\2169\260fS;\034S7\2739\334u\273}j\240\273#\364\276;M\216\277:\373\265\2239L8\244\271\026h\376\270(\033\303\272\216\233@9IA\2279(\317\013\271-\333S\271\305\240D\272\250\301\214\271\016c\2105\3568#;\252\341\352\272\347\201m9\353\214\321:\227*E\272\205\255\n\273\225w\001;kU\245:5,\0369=\372\3018\302))\272\211Y\033\273\272\241\200\272\226\303\n;\346\214\221\273Hg\022\000\273\242\315\010:\225\344|\272\356\006\224\273\346\257s;`o\204\273\007\326\r:\027\014\375:D:<;\241\306\2219>a\261\271\220!\020<\327\342n;(-B<\202M\324\273z\275\':qxS\272\255e\013\274\000.\200\273,%\307\271Z\336\236\273L\371\000\274\334-\210\2737\220\300;\334\224\207\274`\177G\274\37318=W\246\243\273\304\3439:v2\203\273YD\354\272\313\374\237:\343I\001:X\005\2639\263\nq\273\310\241z;B\351\337\272\222\350\r\273\266\213\371\272\322\313\004:y\'I;\001m\200\273\255\341N\272\270\235\267;M\242q\273\225,\304;\010?\336;\303>{\273n-V\273P\357\357\272-\364\210\272\340\312\"\272,\'\3649\305\260S\2710\000C\273xaX;r\315\215\272\272\317\020;D\346L\271\201\346\253\272\334\001\3279c\241\2529!i\2178\375b\260\272E\001\2259,\376\3139L\322\305\271\365\016\204\271\312^\2619\203\352\034:\013\034\364\267\324\267\223\273/\240\370\273\246\230\355\273\324\017\213\272j\340\306\273\017\317\227<%\321\373\273\027p\240\273*\3418<\002o\271\272\304\260/;7\243/\2
72<+i\272\246\211\206\273v\035\223\2705\242\304\272]\250p\272\211(\013;\227\034\234;[-\316\273H\251\222;\341\273?\273wS\336\272\272\273\326\022;\372\252\033\273_\354\025\274\221d\343:LW7\273\336\354\000\273t\024S\273\3655\003;9\276\273:\307\364\302\272\007\217}:\372#\202:\355\225\031;\334Lj:J\224\024\273(q\210\2736\n\255\2739\2267\273\377\356\2049\247\204\223:Lu6<\341\247:\2716\006\250:\276\374|\270\356\031a:\345b\217;\231p%\273\225\022\246:\375\265\027\273\205U5\273\300G\361:\354Gv:\320\266\323\272\270\353\017<+\241\252\273+!\035\274\305t\020<\336\254\326\273%\242\367;\002\263\236:h\001\246\273\317\343\371\272^\353\377:+\336`\273\243\000R9\350ty\273\346b\300\273)\203\311\273\247\365\375\271T4\010\274\353\322\035\273\016\343\356\273\227\362%\274z\372\235<\266\253\215;\302\216F;\364O\005<\220\245\3449\3709\2178\027\272\025:\205\r\263:`/\344:i\340\020\271\372\314\006\272]\321\303\271\rk\200\272\203\252\016\273N-5\272\ro\237\273\374\265\305\271QG\023\254:y\355E<5j\341\272\213\301\034\272\016~\031<\034\311\n8\337\322o\273\033\035\030;\221\306\010\271\316o\254\273K\034\014\273\033:\203:QV\264\272Tr%\271]\323\357:\262\204 ;\216Nv;0\201[\272\026\372\377\267\013LV\273\243\024\001:BCJ\271c\260\2309\320#\223\273m\220.:\254\370\027\273M*=\272]8\321:\262|g8\024\301\021\272\243 \016\273\3735\3559j,\243\271\260\264X;3\374\252;\206O\325\272rr\020\272QC\336:\035\335-;^\301\035<\245\035\210\273\035\351\322\273\313\000\212\273E)\021\273\310\362\360\2727\276-\272a\240\021;\344\252_\273\260\021\000\272\306\303t;\376\'\r<[\203;\273\255\'\035\273\335\300>\273A\364\210\272\3431\0179\t\213\204:\314\376\000\272R$\333\272\337\215=\273\332A\\:H\323\203;\263\266\002:\251\034\326\271\006UX\274dOz\273\205QD\273\277u\205\227:D\324\214\272d\374\273\272\t\'\227;\215`\211\273J\001\272<^z\013\274\016\312\222\273\322\017E\273\275\n\320;\273\030\201\273\370\273\004\274\014\344F\273\023_\300\272;\365\2379:\341L9\276#\371:\n\220c\273\031\215Q\272\356&\314\267\261]\237;e\n\311: 
\275>7;=\312\273`\242\332:\006U<\273\002\212\004\273%&\025\272\360&\n;\371Rb;\330\003w:1Xb;\335\\\3639\330\307\214\272Z\037\230;&i\231\2739\202\206:\275)\224\273A\022>;S5\245\272\272\000\224;\326\332\003\2737\\\350;\374\017\225\273>B\271\273\310\336\2609\250\321!\273\233N\373;\023$&\273u\324\004\272\233\'\221\271B\013j:|\303\377\272\016\204p\270\237T\200\272\033V2<`ZJ7<\373\322\272(=n\273\374\312\':\234J\317\272\225\316\341\272\010s5<\024\246P\273\263\032>\331\232:xd{\271&O\213\271C\313\2518BNU\271\372|Y\271\005e\3409\022\312?8\004tT\271\374\0202\267i\300C\270\267jC:\363\325*\272\333\377::l\343\374\271\036\t\3468\3078\350\267\'\004\032\271F\203\277\272\343\315\232\272e\370\321\2723\350t<\367\374\323\273AV\016;\361-\323\273\027\361\032;\324\365X:\002\253N\273\3264\322\272\213S\362:s)\207;\372e/:(E\3249E\3426\273\360\0300\271\350Ma8\241\262\275\272\376\030\217\272l\364\2429\323\243\211\272j\034u\273\224Aa\273z\237\257\270O\373\005;\217p\335\272\312\324\324;f\371\234\271\361\001\276:-p?:\347a@9\364\"=\273\210\367\033;Ay\220\272Nb\271:@\376\027;\241n\3039Rl\035\273\031V\204\2726\033\005;\200a$;\312\243\037;S\361\236;\317\367>\273{\367\245\273\n\255\3349@<\272\272rD\250\271@\213%\273\276\003\254:\364H\276\272\036|\246\272t\343R:G\370\316:\332\322\303\272\376w\003\272NH\026:\215f\324\2712\267_:\"\225\273;:\006/\273-\361\r\272*j?;0\352\226:0\005\032:GB,\273.\217\314\272\010\031\220\272\337\343\366\272\326eC\273\010\034\215\272\234\225\333\272b\351\241\273\342\241\33499\2572;\315>\212<\317D<\273-VQ\273\215\rW\273-\242\205\273\020\312\r\267kF\201;\'\266I\273\313%c\273\247\\\310\271N\372-\273u\331\016<>L\026:\207q\3419\375\024\325\273\024B\365\272\237\367\227:x%\273;\272\t>;\366U.;-\321\314\271{D\262:6X\316\272c7b\273\260s\253\273\022\177\025\273Q[$\273P\332\251\273\303,\206:\002Q&\273f\3510\273\361\332\204<0\224\3248D\036K;\365\316):\250\256\257:\257B\024;9\310\310:\370\356@\273V\032g\273\212\023\246:\'\351\272\272\246\005\320\272\021\205\037;;\251D9\225Q\2719\346\364N9\233v
\304\272\241\025\3739\3318\003\271\224\222\2618\356\312\3239C\353,\272\276s\027:\007\332\272\272~\331\375\272\344\320\"\273V\003d;\253\313\251\272\243\036\006<\360u#;\r\014S\273x\016I\273\212#?\272K\332\023<\323\344\261\273\345B}\273\274\267)<\024\334\310\271\254\026\236\273L\224\230\273)8\251\273r@\200\272$\343\301;;\036A8+\027\345:B\366\255\272\360\302\206;\330[!\273\217\021\005\273dJZ;\214\177\207\272\325\2071\272B\254\337\272\r\002\217\272\014{\362\270,\005\246:\253\327\"\272!M\302\272\031\302\201;\265{]\273\272\330\204;\270\207\202\272\221\016\321\272\357I\225\272\"*\227\272\362\r\037\273\353\035\020\273\316%g\272\264\337\253;2e+;\024A\032\273`<\"\272\347\332>;\263\\F:s\177\227\270F%\205\272\314\320\r;H5s\272\236\235\252:\370\3274\273\270UW:p6\321\272\267\005\256:\355\232\270<\242\211\033\274x\214\343\272\377\212e\274-\337\306\273\201U\264\272\366\\\270\273F\253S;\037&D<6\202\026:h\021\371\272\341tV\273^\002L\272\271\367\n<8\004+\273\313\317\352\272!:A;\006}\273\271\341\330\204\272\346j\2319\300i.:\221\373\302\273\356\037\024\273\327e\342\272?s\212\273\275\260\226\273\270?G:\253\220\212\271U\030\302;:R?\334\2109v\333\220:\233\025\026:P\035\335\272e\270F:\3018I\273\271\207\014:bS\003\270\t\271\2057\240\010\337:\345\377\365:\265A0\272\264\273\263\273\260\335\026\274+\261\226<\343\022[\274\330\232\310\273\266oO\267:\327r&\273\\\025\315\272fI 9\345\357\3369\016\350\303:\230s \272X\226\206\271\362\013\344\271\0174\2749.\344::\204\353\317\271a\331\2439\034\321\254:\232\207\374\270\260Ag\272 
\275\327\272\300O\344\265\014\241\300\272\232c\374:\311A\222\272\332\341\260;\231\331\032\273\351\363\212:\215|B\273n\246\232:B\t\372\272\010!\033;\205\210\320\271\276\270\':\010\361J:\323\274\261\271\372*\2719J;\2268T(X9\343\257\347\272\306\355\274\271cr[;\313\236\326\272:\247\3319}1\337:\360\'\345\271\230)\215\271\340\306\3549\316J\262\272U\370\362\272\000O\314\266\216\323\214;N\257\216\273\226\237\264<0\303\007\274v\315\225\273X\017\034:\310\233\n\271\362!\245\273\305&\225\273U~W\273\344\232|\273\032\212J;\206\265\215;\024\316\010:\201\305#\274VMV\272f\372\'\272\304/\034<\326\262\223:\346\026\322\272\022\035\221\272!J\202;\006\372\006\273\274\364\017\273^\262.\273i\267\235\272\302x\037;\271\273\256:\370\253J;\323W\037<\206\'T:\240/\033\273\322\000\256\273X\335\231\272E-V\273\032\302\316\273~k\0079\253\257\\\273\244FY\273\351!\226<\352\315\353;\206\207i\273\'s\342<\200\301\016\273s\327*\273l\377\241\2738\266\037\273\252\333\2249]\317d\273\361\217\251\272-\344\362\273\201\314\334\272F\253\033\2733\305@\273\331\246>;\231\212k\273LU\':`\271\036\3639\273\0037\241;;\317\221\273x6D;L\343c\271\361\014^:r\306\213\267\223\261\035\270\032\344\317\271\221\324J\2725\027\3619{\325\2459\260~\t9k\324\243\271\251\255\013\274\346\241\244:\0025\265\272{K,\274)\257\221;0\007\0068\331\303\211\265#: 0-;\264\026\316\270\002\0216<~\212\002\272G\314\206\273;.n\273\224\225\3369^\017\037\273\306\305i9\217\022G<`\224\027\273\3748\267\272R\017 \273\273bt\271\314\252\267\271\375\371\340;\023\303k\273\256\321\204\272\t\303X;\340\335\251\273\020\346\211\273\r\n+\273\016\330;\273\037\247$\272.\302\"<\024\223\023\304\273r\020\316<\365\251%\273\3759G\227\220\270M\364\336\271\n\377@\272\3162\236;\016%K\273 
\262\030:I\302\363;\250\306\331\272\254\002^<\277;\254\273\334\271\211\273\005\360\370\272V\356\240\273\277d\347\273\257e0;\231\357\253:\345}(;i\277\253\272\371\264`\272\265\340\366\273X\2323\273e\000\024<\353\345\366\2711\367\352:\307\237\212\273\000\212u;\277\003\016;\335\223=\273\024\222\351;,\354\354\273@Z\256\272U\222F:R\200\220\273\316\342\350;\352k\'\273\251\347\244:a\357\353\273\246\306r9\343\355\022;A\233\355\273L}\236\272\216\244N<~T\361;B\250\035\273\330\204\267\272\031\227C\273w\365x\273\333\363\2379C\264\206;XT\311\272\316\355\221<\013\016\361\273\350hS\273\010\332!\273\345\257\333\272\277\376\r\273\311\204\354\2728\202\207\272y\221\346\272\2249\2317\367B\277;\030$\275\272\227iK:x\203\307\271m\262i9\177\371\342\271\326\005C\272\036\261\201\273\000w\257:\311\356+;o\252\t\273\r\311\013;\272\356#\273\320\013\370:\320X\006\273V\234\\;0\337\326:\010\312K<\314q\232\272\221x]\273wz\201:\276\363\266\272\337\256\374\272\203\037$\272r\246j\273\001$C\273\010)\323:\t5\213\272\265\325d\273\362\027\025\273\272:\265:\314\271);\351%\304\272rO\037:IpL:\342~\261:\207G\026;F\360\"\274\233\002\322\273.\253\224\272\237\266\004\274:\373\255\273V:\273\273\204\027\311\273\330g\242\271_\371&=\260^q\273+\244k\271\021\233U\273\307\t\333;g5g:\325\233\360\272Z\273\034\272\021\237\023\271\255\360\034\2737\275\225;;4\036:\377\352\240\272-%\332\272\200\022\2408`\375<:\247\320\307\272$\\?\267\016\201\0076\235\242t;\350\2069\2724o\362:!j\000\274x`\241\274z\266i\273WF\215\273\324.6\273\'\375\202\272@1A=m\325\272\273\363\001\207\273\222\311\274:\\N\n\273\001\307T9\020\323\321\270,\236/\272)\323\262\272\321<\200\273\236L;;\036\035\034;\212\031\247:\243\211\035\272S\216\365\2726\212T\273C\200\217;\214b4\272\375L\301\272AqF;\207\243#\271\\\335\014\272A\345\230:\370\347\004\274v\230\030\272a)J\273\273c\275<\377\317\255\273\216\267\315\272X\t.\273z\374H\272\325\027\007\272/\221\n\272\275\303\213<9FJ9\254\362\376:\216(\256\272\230: 
\273\246\235a\273\240\351\226\273c\255\271\272\t\226l\273D\017\032\273\376k\235;E\250O;\260\0054\271\263\026\221\272\223HR\273\215\000#;\345\216u\272:\r\354\272|\367M\273\265X\301\2700\005\263\273\223z\034\273\244\002\315\273\314Q.<\360J+\274\325\336\316\273`P\203<\235\355\";\014 \221\271\323\337\002;5d\007\274&\016\344\272\305X\017\274f\300y:\340\213\224\273\243h\232<\276\233d\273\342S\017<\342,7;\227\301\225\273\205\212\220\272\272\231];\366\263\343\273\266\222\304\273vO\205\273\304U\013;<\233\214\273\270\273\211<\363\233\331\2730\211\334;T\363\366;\220\220\2349\002\264\232\2717\314\315;\207\204\332;\206;\216\273\005\217\017\274[\316!\273\325C\002\273\222\376?\273\335\242\250\273\374\250\206\273\014\266a9r\203C\273\3312\331\273z\274\313\273\303\332\033\273xs\262\2732\311\022=+\363M\273\355\361\353\2731\310\032=G^\340\272\326\024\3569P \236\273\221q\311\273-\270\032\272\030\363\266\273\367M\313\272\0215.\274\266\234\304\273\227\305\204\273\360\017E\273\034\304\271\272n\252\035\274\217]^<\223P\203<:\023\202\273\214U\375\272H\265\3219\311\2658:P\300\215;\224\336\276\272~q\363;\247\332s\273\352\'=\274\302\260\026\271\331k\006<\275\375\026:\255U\217\273F4\320\273\223\272\033\333\357\273\33611;\204\021\206:M\303U\273o\010\356\272%\014u\271\0011\035;\235\316\3119K(\256\271\223\240\222\271F\320\031\272\213\016\016\274\250\303\2157\230\374\222\273.\260G\274HR\316:\320\242\253\2716*\001=b\005\342\272\376+\022\273\000Qe\273\334[\225;G\346Z\271^\221\261;a\265\215;o\271\223\272K8\372\273\332=k\273a\220M\2737\274\016\273\344@j;\010\302K\273\332W\216\2739w\321<\302{\343;\340\211\277\271\335\360\036\274T\004\362\273\372\236\025;\360@\255\273\230\317\226\273Xr\030:\255\224\215\272\357\307\276\272\336e\222\273T\221\335\273\236\302\213:\373\273\017<\023\345s;\237\2229\273o\334 
;\236\376\330:h\236\014;;-R\273\357\027\313\273\210\274\\\273OWR\273\214\351f9\356jp\273\214\252!<\255\322\274;\022*\334;6\324#\271Z}\003\272*}\261\273`\223\226;\016[\350\272\324!\206\272\332\350\204\272g\316\371:R\230Z\273\227\344q\273nL\356\272\021c\343;\321YF\273pF\023<\013\313\3438\212\251\200\273u\200\006\273\350kQ\273\246K\333:\3606\271\272M1/<\361\375y\273\032\357s9\237\314!\271\346\021\366:\001\021\266;A\274\206\273\016\341k\273\270\217\254\273\214\202\3758z%&\273\254\250\235;\364\317\247\273\224\267\324;\035\000\2209\2653\256\272\351\274\031\273\335\000\2379S\302/\272\003R\341\273\317\302\355\272\202\276\254\273\320\010\005\274+\356#\274M!\236\273\320l\221;Ot\034=\365|\342\272\231\337\202\273\353u2\274S\036#:\020R#\274\033>\253\273\321\367\r=4\2157;\377\3070\273\242p\233\271\377\254\027\2730)\333\273]T\003\273A\022\367\272\271\326\346\272\363\203\2448\251=\n\272_\267\312\271kJ\277\271\007n6;N\034w:A\277L;gz/;\310\372\037\013\246< \200\036\273i\000\014;e\314a\273\000\242\230\273V\221X\273\276\310(\271\333\034\256;.\216V;~2\023;\312\216\007\274\202\301i\273\232\nV;\000\246q5\351~\331\2729\255\032<\367Fu<\300Z\034\274w3_\273\031 
\241\272\236\025\267\273\232%M\272\007\036$;\\w\215\273-7J<\234\255\316\273\217W\356:\217\357\264:\206\233/\2729\307\307\2714\346\275\272\214{)\272.\223\020\273$3\371\271s+\347:\213\221Q:c\010/;\242\256\3338\0251\222\271\023r\224\271!\205\275\2734a6\274\244\327^\273e\301\245:\246\246[\2742_:<\254\244$\274\034e\014=e\353\250\272\003K\363\272a`\370\272\307\2057=cq\244\273f\221\222\273\370;e\273\202g\017\272!k\251\273\330\336\230\273\"\205z;R`\277\274*\300\026:;\017\341\271\007\241v\273V\030\036\273{s\':\3430\361:&\000\3039U\366\236:\341V7:\354\t\246:\016\320\226\273\300&\257\273\272\267f\273\350\302>\273\205\255\276\273\222\314\230\273l\340\\\273%\355e\273\214$\271;\026\034\346<\302%\211\272:@R\273\205y\346\272@\031\267;\302\327E\273\214\264\032\273\010G\263\272J\232a:*\"\036\2735\362\016<.J&\271}\222\330\2723\2425\273\207\353\n\272\206\252\005;\010\325]\272\206\374\253\272^~\207\270\007\n\247;v\275O9\353\330N;#F\270\273\344t\003\274\330M\364\273G\237\010;\307+\223\273\273{\r\273\017Y\270<\0075\026\273\034\303\026;L\201\023\274\356\322)\273qd\306\225;\241\2672<\206\361\037;\235\005\000<(4\342\273\004\366g:9\234#\273\261\246\325\273\000%\353\273f\304e\273\235\361\022\274\355\377\316:<}\005\274\273m\032;\214\206\'\274\263\350\r\274\220\000\342<<\257w\272S\255%<\261\241\231\273E\367\307\272\321\236a9_n\3109\306Wq9a\034\275:\267\273\217;\006\350\320\272\244|\203\273\2531\371:\214\345\272\272\322\016$\273KG\207;\233\013\300;*n\036;-\241\007\274\211\233\203\273%1_\273V\206X<\353L\234\273\312\227-\273D%5;q\033\020:\206fZ9,\323\276;\342@\221;*\207\344\273\032\354\207\272\363=\211\273\340\351P;12\234\273\240\344\026;\345\007\345\271\2529\212\273\227R\350:\303\215S\273h)\243\273\274\271(\273X\357\210\273\207n\245;\2737\007o\271B\251\310\273\2103\2239\254\016\205<[\336f;\317\327;:\037\230G\273\324\315P<\344D\222\273\326\376\345\272\374OH\273\214\332\036\273\362jE\272\364\362\371\270\317yL\271Z\006I;\317\221\026\273\206\337\262\271\235\013w\273FJ\344;\235\264\371\271y\007\244:\37
4\te\272a\2545;56\335\272&n\312\272#\243\206\273\255\352\026\273W\343\333\272a\354\"<\203\266\302\273\232)\022<\335\204\216;4\364\005\273H\211\237\273@\313\024\273\226-\322;\226\215\023\274\354N\010\274\343\377=\272\310b\317\273\332\037\226;PX\223\273 \317\033<\313\273a\273#\223A<\034\200\t\273\355\220\302\272\t\010\';QS\';K\246r\273\304\364\342\272f\211\322;\3715n\273\355\320\230\272p\031\026;\037\315\244;\354U+\273@\336$;\n\014\252\273\262a\226\273t*\342\267\317 \347\272\241\310|<66]\273co\261\273\263\245\237\271\311=G\273/}\004\273\020\273\246\272\212i\031\27390\003;\334\r\177;\205\013\276\272\'\206!\270:\342\225;\'I\3409z\034G\272Y5\201:\306\361\302:+\206\373:\367\324\020\273\026BA:\007\265\325\272\331\010\345\272\274*O:mi/<\2009\222\272\247\274\322\273\025 \221\273\026<\373\272\320L-\273\346a\370\272\213)%\272\336\222Z<\201Z\243\273.\325\014\273\004\240\250\273\350\216(<\023\363T;\350#\226\273f\263\23177\004\267\272\223\001\240\272Md\216;\315J_\273\252,\315;/>\027\274\376\315\310\273\262zK\274\240\215\257\273k\335\376\273nZ\307\272\206~:9\006\3034<\251~\314<\t6\002\273\311fC\273\212;\200<*\332\177:<\232\t<,\256\273\273\27665\274#\010\n\272\365&\336\272D\244\211\272\320\250\204\272\364\351\365\272\264\341\235\273\220\313\264;E\207\031<\330\24239J\026\3169\344\362\221\273\215>;\273O\000\344\271F\033\232\273\341\n0\273\344\021\353;\356\352v;\177J\305;W\360v;\245\327\004\273h\337\274\273\217]\017\273\260^\\\273\263\270\"\273\221\273\225\273\266\016\035<\344\353\030\273\224\331\235<\263\r\002\273\240\212\332\273.\205)\274\362\030\2459r\\M\272\232\341I;\224\310\327\272D1\353\272\006\243&\272\246\223\220\273\201W\016\273\245\2439\273`Z\260\272\367Fj<3\210&\273\376X)\273\242N\005\273\335\260\345\272\221\005C\273\307\213L;\n\335\226:\337\227\002\272H%\240:\006G\211;w\t)9\016\0052\271\335\340k\272\353\221@\273EJn\273\255\025\235\272\020c\3278\330\352\0009j\272\262:\004\240\014<\362N\256\272\177+\354\273\243DJ;\024V8<\342\254\0328\334\301`\273=\374u;{\220_\273\343\n\263;\
365n\003\272!%\024\274\310\206;\273\313\211\257;\030]\342\272<\340\354;!\306\000\273\364\n\367\2737v\010:\263q\217\273\327\334\023;G\246K;\306\256\206;\240\357X\273\205\312\212;L\t\330\272\277c<\273\260l\3525\333~\235\273\304l\001;\001/\010:\226\232\353:\006x\021\273\360\3512\273\252q\214\27348\214\273#\321\237:\345\206\203<,v.\273\034\355\347\272\177L\260\272\377(\004;k\320\243\272:\341c:V\366\003::)\374\272v\204Q\273\333\362\014\273\336\213\237:\347\024\225;\000\250\374:\'\026\017\272\254F\022\273-\375\020\272\223\332\347:\307 \020<\356\200\260:\267\242\007\2740\341\203;t\366\212\273\232\2478\273\0062\017;O\270\202\273\211\341\021:\364+\0079\352\367\306\273X\260=<\273\"\026\273\351\306\022;\216 \3108_\260\341\271y\366\355\272\323mT\273\020|\253\272\337\014\240;\215\243\224\273\252\374\204;\007=\2019\317q\253\272\341s(;;4\026\272\277\326O\272\212o\332\272\037\207\\\273h\000\020\274\302\352#9\364\347\323\273\207\201D<\251r\354\273\213\263\272<\002\267\214\272\010\210\320\273\207\235R\273\232\275\305\2549\022B\374\272a\210\267\272J\022\030:\376\002\311[:L\224\234\271\307\240\0208vC\237\272\001\223\324:c\n\006\272\311\303O\271\020\'\007\271D\177+\270\000|s\271\334\006\3579\\b\226\272\021\005=:\344\322 
9\210\311\3439<>0\271*\320F\2707\001u:R\260\260\272\270$\306:\027\031\r<\217\256\244\2729\366=\273\035\034\270\273\322\224\231:\020j\"8TZ\226\272Y\244>\273\022\020$\273\025\226B<0\327\313\271\034\\\335\273D\334\027;P\3116;f\357*\273\003\005\357\272(\"\2676\276\337\201\273gv\177\2728\212Z\272\234Io\273\010\000s\273\332\245\002;\220\n\023\273\361\233\031<\312\n\303\272\322\001\264;\035\205\254\271\330g\3748\362\330\326\271\220\307\373\272\003\2368\272\t_\034;.\271\300\270\327b?:\022\014\231\271k\r\006:\320\016\\;<\371\210:\204\371\214;H`R<\251=\345\273\025\376g\274\255\251A;\020P\314\271\312&\246\272_0\335\272L\032K;,\250\3119\200cK\273\226ot:\352\300\302\272\025i@\273\372\346Q9u&|;(T\003\2738\242\213:D\361\021;ph\200;\260J\213\272\013\315\214\272P\321\200;)u\372::\017\"\273\007\357\223\2739\227\223\273Y\020\327:wu\211\273\350\232p\272\370K\2729l\206\315\273\362#\246:\324{\360;\267lN<\025/\276\273\255\2632\273\222\253\327\272\301\276%\273\337\356(;\214\010\234\273\375\031\200\271R\021\263\273\367\247\243\272\237\034\017:\370\210d*\273\254\nS\273\013\2569\273;)\224\272tn\363:\'\207\317;\223\315N;T\350\317\273-\226\020<\006\270\212\272\336\240\257\273\017\217l\273a\036\214\2727\210\353\272\271pr:\256\256\240:\375l\317:\220\274\016\270\234\244\326\272\341\312\033\273p\242\243;\316\332\313\272\014\364i\271\246@\273\272W6\017\272_\310\257\273\302\361\262\273\202\210s\274\375\315\263\273\327\243/\272\000\347\3044k\2718<\004}\320\272\027!\013\273\221\024`:\013\006:\273)\010\200\272\327\333\205\2726\027V<@\256\017\266hR\'\273\227\007&:\307\024r\273\"zq\273Xj\333\271\307M(:\r\336?\273)C\213\272\361\335(\273(\005\256\272\225\200e\273C\366\267;&{7\272\014\355\3709:\320\030: 
\334M\273MF\205\273\230\232\r<1d\300\270j)`:\026BC<\322\373\311\273W\306\037\273\276\00739\313\306p;\263p\325\272<\254\206\273\0218\017\273\300\330\261\271\251\372\000;\3710\221<\232ON\274\323\361\264;\237\177k:\311\263\310\272\033/G\273\372\244\206\273\301|\223\273\250\000\366\2732@5\273\354\240\334;p\265\021; \334\273\273\277k\342\273\372\226\n\273\311\202\360\272\264\205\205<.\t\323:~\276D\273Q\017\037:)(\340\273\322=\273\273@\274y<\324\233\306;b\272\301\273\214\320\201;\265<\267\2732\326\233:\345\020p;*\313\3539\253\272>\273P\316#:?v\353\272\262\t\003\271\"\347c; \033\202\272:y\212\272\316\350\256\272\327\035\023\2738C\241\271\327\346\257\271KJ\220\272\274\226\333\272\205.F\273\337\322\370;L\242T7y~\260:\343\273\212\271\211;!:\272H\217\272\010\014\351:L\373\34798o5\272\036\371N\272\031\216\022\273\317\377\2449\301n<:;\362v:7\021\003\273t\035v98\244\303;\216\340!<\325\367\007\272\r\247\003\273\247\026#\273H\223\202\273\3133\007\273C\005F\2738&g;a\350,\273\373*h;I\027\2409Y|#\273:/3\272sQ\231\2729u\014\272\263\242\007\273+E\021;f\364\006\273\025%\276<\035I2\274M\361\315\273$y\350\273f\305(\274Fsg\273R;\371\273!T\253\272\277\315\323<`\374\260:\342;p\273\263F\315;Z\347&\273\270@~;\2151\265\273\3343\007;\343\tj;\211\0043\273\022\377-\273}\325\232:OI\365\271\216\2648<\330w\223\272\312\277\007\2721\376!;\3626:\273\244\'E\273\357\213\227\273\230o\031\273\236\320\246:\223SO;WW\255\273\310b\263\272\277\221>\273\362\177\315:\304\361\001\273\271\362\363:\032#\264\273,m\026<\336k\210:P\363Z\271\227\'\t\273IN\034\272vf\241;r\370\242:\333\004\347\272F\235\222\272\221\256\213\272\322b\277\271\210Z\007\273R\327\252:E\365\024;R\036\0219.\005\205\271\031\336=\273{\273\030\2728\002\274;R\01348W?r\273\004\024\"\273<_o\272\021\030k:\357\264\272\273\201\030O\237\273l\330\325\273\236\340\261\272N\003\217;9\035\001\274~\020\245\273\336\3674<\276=\250\272\376\215\235\273\340\n\251\273\266\342l\273\333\021\235:\324\354\r;\274x\020:J\362\230\270\020<\274\2679\367\253\273vJ\026\273\253K\234
\273\374v\210;F\202\211\273f(\227<)\210\214;\366\021\225\273\225\t\022\2730?m\273\316\322\005\274\336\371m\2721\362\034\274_1g<9iA\274\205c\315;\215X\2348;\332\203<\254\2610\273*X^\273\361P\370\272R\031\3669(t\252\272\024eH\273\267\220\205\273\220\3244\273\225\022:\n\202\273\270\025\013\272Rqv\273\311}\036\272\312\342\2179\316\333\r\272\002\273\241\271\347$\302\267\025\233\004\272\272:\320\271\360|\273\272\242\3734;\223\032K:kyq;\375\346\'\272\256\303?;\317]\223;\247\300d\273\330\305\203\273\306N\327\272\014\033#\273\247<\230:\200hq7\373\036\377;\264\342\374:!\225x9JG\223:\300\023\260\272L\344H\2735\267\315\272\360\355\343\272}\204\2259#\314n\273\274\021;:\223^\250:\\\217?\273\225\235;\273z\2300:\210\024\000<\n\263\234\2733\233\313;jh/\273\366\246b\273\363zx\273\224\350\2679\252-\207;\211f\214\272v\266\2129\312Tq<\323~\210\273%0\010\274*\2433\273\0015 :\316\225d\271\337_\334\272\304\334\214\272iS=;/\352j\273\322\304\372:\331\363\005<\231\tC\273\257V\351\272\325\374\331\272g\206\313\272\nC\257\270\376\320Z\273r\2137\273\007\"\213;\026i-;\241_\3769\331|\222\272\253\027y:\222d\024:g$\230\273~X[:\367\252\265\273\337\003A<;\267\026\274\205\t\327\273\322i\035\273\332$\352:cY%<\362\232|;\343\257\327\272\340\221\374\272\265Vl\273\223\355\243\273\200#\266;\327\207\027;\233\235\262:\343\247U;\214\375\220\272\347/P:f\304\256\2722\373\3279\205\367\031\272\233\342\3049U3\334\272\366\267\335\272\271r\274\271\334\304\331\272]\037\010\310\233\272E\271\236\273h\343\r\273\212\362\332;\037(\242\272\002\314\336\272h1\223\273y\266\322:\224\035^\272b+{:e\352g\273\327\203\223:\226}\017\273\274\203\344\272\222\322G;\022\315m;\025U\240:nK\332\272\031\322\314\273n\016!\273\244}\204\274H\031\301;\354\347\266:\014\260\222\272\332b\016<\362c\005\274]4o:\357\273\214;\264\304\241\272\016\272 
\272X\237\214\273Pd\3368\2004\347:Y\340\036;\004\365D9\004\273\272\343)<\200s\017;\014\250-\274\202>G\274V\332\021\27448[\273UH\244;\376\3235;\331\247\355\273\027\257\010;\352[\375\272\246pM\273B\302\352\272\352\317|;\263\2743\274\250\3679<9\377\214:@\373\201\271\315\303g\272>\024\255\272e\nB;\376\243?9\357\332\273\014U\3519\367\337\207\273\022x\003;\204j\202\272\221h\237\272\333\220\001\272\362P\001:\333/&\273F\tW<\205\\\366<9B\210\273\345\223w<\377\"\350\273q\237\005;XN\311\273F\214\242\273\303+>\274\246\360\367\273\253\341\272\273\316\007\216\272m\022\237\271i\3430\273\251wB\273\270\334\242\272}\343\030<\242S\3319\242\253\227\271tV\255\272W\177)9\322\270c;q\3736;\271eM\272q\3033\273\332B\341\273\276\264\014\274\374\006\200\267\030\033\211<\254^\344\273\203\245B;At\223\274\263\310\204\273%;\003\272\314\245\354:\0352\331\273\246\266r\273l&B=\376\363!\274DL,\273\211\200y\273)\364h\273 \003q\272B\277A:\225J\020;i/\367;:\250g\273\031o\007\273_\246\322:j\010\261\272\227o.\272\213mB\273\0237+\273B\205H:\267\372\3578\356\256\006 
\365:\227\004\217\271Q\220\240\272QH\345\272\256\246m\271~\374\301\271\270[2:^u\311\272\2420T\273\206\360\3659\210SI:\347\324\r\274\\\222\213:\034\332:<\233w\r\2722\366\331\271[\241%\273\266s\3328cC\232:\205\316<\273>_\375;\306|Q\272\301E\001\271Qy\257\273\225.~:<-\337:\005M\307\273\354\031C\272\370\224=9\315K\360;E\333\223;a\327\346\273)\257\223\272\230\351O;E\035\035\270B\\+\271\036K<\274\361X\322<\370\325%\271\343\312\270\273\307\026E\273s\343u\272\0045\226\273\001\324\020\273\240\001\022<\203\360\326\2732{\310\2737#\207\273\nK\323\273\347\367\200<\372\270\3559\365w\333;p\217\"\273]T*\272\330\033\241\271LK5\273\344V\350\272\240\253\237\272\344T\205\273\021,\r<\032\367\234\272\243\0269;\200\336\212;5\337\310\272\343!X\273\315\313)\273E\236\017;G\204H;\221\326\211\273G\3077\273\327\320\017\273\355&\330\271Q\207n:\001~\024;\035\305\216\2721j\025;\261\257\026\273\242\232\275\272`\377\207;\361\333E\273Vp\277\272Q\022~:\022\367\350:B\351e:i\203\3308\277\236\2669\340\301\031<9\016\262\273\307\351\'9\356z\013\273\333T\350\273\355\365d\272\302n\037\273crh<\t\226\r\273k\314l\273|o\025\272K\247Q\273\221\352\216\271Nn\227:\305\324\300\272\210S\270:A\272\2239\334\025\304\272\277\313\231:\277\027D;@C\003;%i`\272\341\212\033;8\351\331;\310W\210\273\364V\2079\354\247\030;\303Q\276\273\301w\004\273\3248I\272y\335\014<\253U\275\272Lz\200\273\206\234`\273\364\320\2249\\1:\'\253k\272\021\312\203\273\255P\014<\266\257(;\303\226\305\272_C\246\273^\356\002\272\242\004\340\2725*\207;\272o\313\272C0V;WYm;\241\323k\273\333^\352\273\262\336>;\353\234e\272\373h\240\272@h\r\273I\3166;\271^\036;8#\357\272NRt\273A\371\255;\t\276s\273|\036\257:d\177{:\252$\337\273B\357K<\\\301O\273c\310Q\272\001z\231\272fD\004:\005\347\245\273n\315\024<\034:\213;l\264l:\347|\010\273\020yr\273S\236\037\273\004G\226\271\216\013)\273\240\032P\273~p*\272\364\000\007<\\\016\200;\267n 
\273[W\311;a\023\274\273\325\305\237\272\210\224\"\273\034\260&\272j\203\346\272\361\331\321:\027\206\010;\342\245\206;Y\370l\273\223\317q\273\276$\277\271\235v\254\2711j*;c\027\004\273\017/\005\272\235Z.:\304\315\377\272\020\236\221\273\316\346\237\2721Q,\273\037\307\3369\317\312L\273N\350\014\272\262h\023\273\226\227]9\003\275Y9\236\376\220;\020\221\2678\343\032\2519B\260\253\272\242n\0279SI\244\273\004\252:\272\336S\261:\205\370\343:\307\311\207\272\210\026\344\272\251\2623;\020\337U<\205\302`\271\354\323\271\273\211\r\352\273v\007\230;\250v\263\272A\360\375\272\016\260\032\273\362\273H\273p{H<;\347D;`\341%\274\321\211j\273N\024}\273[\256I;\004\340]\273\276\376!\221:\007\346p\272\235\313\337\272\213\335\244\272\220V\353\270\364Q`:\351\203\257:>\202\002;Z\261T\271\026\210\217\272\272\242\236\271\223C?\270\277.Y;\273\007\323\272!s\214\273u\211\211;\':\211:O5\221\272\2705.\272\242\354\r\272\342v\036\273\360\3415\271\307\261z\272\351I\013:\001\017\0379\037\334j\273Z6\365\267\001f@\273\336\226\274;\353i\235\272\301*\362\273>\214\3069m\206C;\364C\227\272\010c\224\271r6\304\273~!!<\322sJ\272\241s\300\272*\374\2519\257~\3769\273C@;\244S@9$\336\223:\366~\246\272\363\313\"\271\302\010\266\272\204\351\245\2722\374\202\272T\317,\273\262N\214:\277\343g\272\177\305\036\273l\201\373\2726o\333;$\260\014;6\265`9\377\345\'\272\"\327\320\272\332Bu;\304\035\3279\204~\016:Z\014$\273\240.\346:\256e\261\272\310;\324\270`\273\210\271zd\n\274\\\23179[\007;\272\356\213\316;\244_\321;\345W\337\273yoJ\273\371\327\004<8\267\267\271\233\215\314\272~7O\273Mb><\274\351\330:^\376t\271)\243\221\273B\220\006\272\337\260\264\272\013A(\273\377H\236;uB\276\273%\033\326\273\354\2673\273\314n\221\273\\b\035<\260G\010\273\244\177\177<\354\r\227\273\325#4\273{\317\021\271\377/\372\272\343[]\2735o\r\273\266c0\274\312A\236:\231\323>\273\353\333\323\273S\025D:\374\031m;3\200H:\264\215:\273\366yD;\257\tu;\341\226\271\273\0341\026\273\033y\023\273\336\350u\272\310B\021:\214\231\262\272Ziv;u\302\276:4)\n;b\225
6\273\307\347\013;xOc\2720h\237\273?8\336:\035Z\235\273\214H\036<\004\027f\273\016k\241:R\221\013\273\275\031\247\272N\367\260\273\306\023\322\272p\322\225;\332\363\350;\303\026!;\204\255\267\272\256\375\360\272#\334\000\272\002\3775\272\202M\224\272z!L:U\353M\272\345\nP\2734\232\276:\215;\200\270\2355\310\272u\376\001;\361\364N;\350\3367:\347]/\273L\301\2209\034\247\213\270O1\203\272\254\364\032\272\013GH9\030\276u:\232\304\014:\362\267\335:\372\257\021;<}\007;4-c\271\334\341\256\271\355\354;\272\253[P\273\240\224\220\272%\216\256\272\240\260\037\272L\266Q;\003\020\213\272()\032:\264-\367\272\007\024\272:\350*\270;\200\357+;\246\023\301\273\375\273\2479p\205\032;D\010\206\273\340y\024\273\006\343r\272\370RG\273-\353\215\273\375\014;\272Mn9:hJ\361\270@/5\273AIA\366\345\272\370v\3657\177\026\2229\352^D\273W\r?\273+Z\244;\363G\240:\325\202\302:\277\2732\274t5\311<\033\036\244\272\372\246:\2733,D\274\374\243D\273\004\221(\273`\364\006\274\316\n\204\273\273\370\245<\0355\252\273\312\37619\211\230|;\251\361\007\273\3163\252:\206\315\241:\262\203+\273\314g\010\273\336O*;M>8;\303\021\355\271\035C\307\271>F\234\271\036G\300:Z\025\006:Q\017\021\273/\350#\271\337\247N:\221\30289\300\010\022:\0069\260;\317\300D\273\307\006\014;\371\206,\273+\233\226\273\267\312\006:\231Q\t\273\346+\247;\234\037\17796K\211\272[\347\341:\206\200\262\271<\274\270:\315\365P;\235hr\273r\366\270:\000\n\3016\243b\235\272\276\375u\272\373s\314\272u\220\2748\223\257\202\272\212\003\t:\'A\";\247\255\220;\210E\370\272\003\206\205\272\272D-9\320H\211\272+\2641\2736\273\224\273\360\231\315\272G\025r:\350\365\305\271NKe;\2069<;\302_C;\332\007\217\271\314\377 
\272\307F>\273\253Oz9\321\333\220\2714\246\267\270\244L\242:\370q&\2710T!\272\321<\324\271(,\036\272\031\266\2608\376Y\031:\256\321\026\273\264\021S;\232\242\305:\245y\252\271\263\263\323\272dg\351\272C&6\271\014\002\314\271\257\243\223\272\335\177C;\205\003@:w\254\316\272\224\357\333\272{u\255;u\350\211\271\235\0005\273\344\"\210:\360p\250\272\366\225\335:F\010\234\272\375\231g\271\223\216_\272\316D\233\272i\323\272;\301\231\211\272\000~\035\273\\g\034;d\364\005\273\266\323\216\271-4}\270\245p\330:|\246\262\271\346\355\020\273\314\016\025<\335\026\232\271\366\322\357\273Q\374@\273\340h0:\260\000\030:\3243\221:1\230\3709\351P\231\271\257E\230:RI\31394\244\232:\335\315G\273\021\001\234\272\201w\":\2128\020;\013\236\303\272F\320\272;\241x\366\272\003\226\016;\330\367\240;6V\351\272=\036\316\273\311\353\234\272\217\036M\272&\225\212\273e\345\\;\303Ke<\261\002\353\273\263l\315;\250\222\211;\255v\227\273g\002\003\274:B\355\272.\355z\273Hfn\270\321,\206: \206g\272\225{\267\272QS\240:l>\201\273\235\375\0269\265\'k:\325\332\300:\2766\252\272Z\377-;\277\316\224:\265T\331\272\203u\251:\302~\000;\311\257\231:[\222;\273\3463:9o\375\33294\217\337\272\376\305\217:\256\237\3418\211\326\323\272\327\267\244:\362x\016;r:\350:o\022}\272r\nx9\207t5\272\240k\303\272\030\310\251\272\206\320\":`\031\003<\211\252\021;\226\255v\273\214\361\214;\337}\345\272\327z\003\273\217\254\316\272\244\2400:sO\261\273\311\3724\272\n\371\2369X\271\327:x\264\003\272\206\252Z\271b5\345\272\007D\004;\275\235\260\272z`\0019\200J\022\271\265\375\026\271\265\014^; 
n\320\272$\315\253\273\324\005);}\213|\273\362\252\261;\356\022\3629\211,\023:\n\\\003\273,\030\2229`G\217\273\350\275:;Xf\327\270\313\001U:\030\237a\273\024\036\003\273\261\344\2429\206\334\263\272k\363\267;\374f\337:q\343M\273\364>\211:\323\000#:5\307\032:7\030\264:\021\220\211\2727\234\001\272O\231c7\377\245\323:\222\006\027\272Gh\031\272{5\353\271\214`;9\320`\322\2712\212\2339\014\211{9\337\247\r9\303\003\2748\021\307\0069P/\3009C\367Z\273\230\301i;7R\363:q~\324:7\304F:>\013[\272\252\233$\273\334u>:\025\314J\273\260R\244:I\246\010\272\224\305\314\271\374T\2158\257l\010<\311p\024\271\007\376\365\271G\020:\273;\032\r\273l\337\203\272z\350d\272\362\211\352<\210\335/;x\230 \274\300\207\033;\260\371\310\272\275\177f;\030r\307\272\272S\005\274L\351\t\274sT\377\273\246u\342\273\313a ;\257\037v\272\347\262B;gXx\272\014\243\323\273\224\2351;@i\220\272%Tw\273r\251D\371\272r@\3269\217\210\253;.wC:\330\343U\273\020r\220;D4d\273\377\3148\2720\346F;\254\200\315\272\233\206\001\273\341&\037\273\364\266\3149d\000n:H\334\357\271\302(\325\271?j\304\272GC\322\272j\354!;\345\'\023\272P\304\0137\314\310D:\331\355\220\271\\\267\215\272\263\250W:\000+\2358\001\363\323\270\257j\371\271\026k2\272\237A\3239\372,%9&\245\227:\204,!\272\320\277$\272n{J;\000r\311;\n`\375\271/\002\010\273o\014\210\273\010\320M\270\225\346\331\270\234-\224\272\326\004\t<@\020\303\267\356\317\255:\006\005\3548\360\314\014\267\\\345\"\272\332\002r\273\307\"L\273\261\315\212\273\222c\370:\310t\213\274\273\260\335<3\014\032\273\341\353\245\273\326\226\r\274\035g\021\274\276H!\273\007\023\336\273Rn\2249\221[\303\222b\273\215\014\201\270H\271\024:\372\\|:Heh\272\037\325\355\272U%\231\272\024R3:7\353\356:\026L\353;\310\224\233\273\337\337e\274/\034\223;\211\2239\273\014\263\001=\212\254\224\273P\2175\274\346\311]9{\005\317\273O\370\236;\350\221\020\273\210\221\351\273\226X\035\273\003ob\272~\342u\271\237\321#<\274\306\277\272y\372:\2712\345\304\271\317\370\256:>d\2208\334zY:6\327]\272\320\034\024\271\245ZB\272`\013\2
01\272B\344\2419\237\210x:\347\243K\272\010\204\355:\300Cp:2\325\t\273\326\245\271;j\251\222\272xb\363\272kCI\273Fu\340\267]\335\t:!\332B\272\200\372\253;\273\251\227\272\357|\2119\027\002L:J7g\273k\036\227\272\214{J\272\300\200\277\272\326\325\013;\277\002\270\271\232\025\001;=\\\325:\010\261\304\273\330\t\246\273\311o\032<\205\364\223;\326\013\345\2730p\004;i\251\201;\036\006\356\270r\'\242\272\366\210\367\272\206u5\273\035\212S;\352W\3009N\016\255\271,:\356\271\267\003G\272m\303\210\272W\335#;\205\206\201:b\212\311\272\232\024\016\2738\274f\272s\255\3439*\036\237:\361~\303:\212\364\202\272\260X\206\272\355\307p\271d\372?\273]\033\257;\327x\241\273L\004\345\272\317T\010;&V\322\270e\272\002:\211\377F;\227_\26099\247-\273\222m\216\273\037\2510\273\300\331\235;G\3144\272\244p\3269}\020\343;\"V\'\273Z1\3509\300\327\311;\306\264\033:\006\343\177\273eY\244\271\006\371\266\273f@-\272\200\326\213\271\314\t\274\273/ZS<\330\306P\273H\202\335:/\302H:\032\307\002;\323D)\273\"/@\273JGv\273\266\243\3229\3430\314\272\255A4;\303\204S;J%A\273\207\002P\271\273\377v\271\024Dv;@F@\273#\246\375\272{\306\250\271\016f\031\273w\270\324\272o\321\017<\337\361\341;\2443\032\273\\\331\306;\251(\252\271\200\377~\267\370\221*\2734F\370\271k\010\022\273$\361\025:\363{\265\273\334\n\032:\331\216a\271%\375\257:\034\312\231:\323\321N:0\341:\273&a\243\272JlN\272\2445h9)O\201:\267\273\017\010J\2703\207\3758.\371\177\270\261\021p\272\"\007\376\271\350\237\206\272\241\336<;\304\244*\273\017B);\027\2407\271\331=,\272\363B\213\271\212\025V\273]\234E;\000B\322\273\333\212\013\273\267\037\021;o\034\353\272,/\2339\344\344\310:\323\220\340\272\024\336\310:\363h4<\302\222\224\2733\316\006;|l5\270}\017F;B\263\210;\220^\234\273\003\255#\273\374\226[9}/\353\272\266\024\026:\003\255r\272O&\303\272\216y\251\271u\370\236;\351\027\326\2720D\275\272\266\273\364:\260\325\336\271E\004V\2722v\3059\266\305|\272{W=<\356\371\215\272\345\257\357\272h\232\016\272\252\252\3009\345\206\370\271rS\242:\010W\322\271?q\262\2739\3
43]\273\364\212\3729\250\207_\272&\002\221\272\020[\244\272\311\201\004;\355H6:\262iM:\214\277\345:\200 \014\273\220t\307\271H\017N\272\374C\010\272\244\312\353\272\343s\225:\026\356\036\273FVG;\254\371<8?\"\374:\034\023\272\2154\227\272\321\2479:21\270:K\36219\234\026\256\271\373\233b\272\n\321\033:\314\252\307\271\033\215\334:L*4;\375\300\275\272U\206F\271(\337\317\272\"\230\303;\334G\206\273o\265\3379>D\235\272t-\027\273\354\334\236\272\324\253\2159\347\352V;9Se<\363\327\267\272m\267)\2734j\201\273\274\035e\273\027\016\001\273=\333?\273>\273\013=w\254\341\272\326G\007\274n\310N\273q\315)\273W\272\321;\216\214\333\273\203\362\037\274\244\222\222\273\271\224|\273!\251\213\272\255T\000\273Z%\225\273\200\217\304;\240=Z\270[\242\005;\r\350\353\272\t\006w\273+\322\006\273\332s\354;\027\004P\273\374sS\271\322\205v;6\223:\273\346_\024\273ia\";*\0047:\234*#8\253(\2479\357(\234:b\235D:\005\260\221:\257\224+<,\0277;\253v\242:\034\327C\274\3564\214\273t%&;\334H@\273x(\2129W<0\273gt\323\273\013\211t:\333E3:`u\346\273\302,\330\273`\341\335\272\213\323\0219]n\231<\010k\202;.7\367\273\206\3006\273\227S\371\272\230\025\006\273v\313_\273%\315\3329\372\314\2309\320\235\324\272\364\243 \273x\356\027:\201x\033;S\005\014\273\003\241\024\272\231\r\243;\360LR;\334M\022\273Z#2:?\346!\273\346+\210:=\243\362\272@N)\272\223R\014<\023\215e\273\213\255C:M\r\200\272\016\324~;kD\005\274]\254\360\272\340\203 
\273\021\032\247\273p*\020<\201\037?\274\350}\261<\214:i\273\010f\231\273\301\207\007\274\3325@\274d\330+\273\314}%\274p6Z;\204d\342\017\273\216>-;\267\246E\273\000\374s\272O(r\272\343\302\026;#r\356\271\252\225,\272\225\323\3649\330\336\2115A|\301\272\331C\360\272\271\230c\272\375b?:UC\013;\372I\253:\332_\006\271~\004\252\271\306\363\216\272`1\023;Rp4\273\312\t:;\264\342\367\272\371\004\237\2722a\230\271Lc\214:\\]\2619\344FC:\371\013F\2732\230\027:\357\003\211\273\302\352\002\273\227\020\004;\037t>\272b\\\302:\004\346\306\272\257\036\333;\345\2612:\352\037\010;\267\337\005\273]\207\221:<\247\3768C\266\315\271\305?\217\272\266\354\034\271\214\253\34594\236{9\024\030\242\271>HI9\204\025\t\273\2667\202\274\333\017\250<}\276`\273r@Q<\256\305\335\273\246\023\261\272A4A:\253]\231\273\314\337;;E\014\334\272r\350\361:9~\t\274\244\212\335\272#\334K9\353\306H<]\001\236\273\270!\322\223:\216\215\005\273\354HT\273\264\347\3119\300h\017\273b\332\017\273DQ\206<\303`\251<\203\355t\273H\340m\272\260p\202\273\024\000\306\271\345\366D\273|]G;\t\324C\273*\346%\273\333\325\312\273vi\336\272T\333\000\272\272O\3409\375t\324\272\205~\034:\014\371v\270s\261\003\272\313\352\251:D\240\242:\303\374S:\002\365\014;J+\314;\312\310\204\273d\201\";7\020\203;\300\273\325\273\"\333(\273\207\223\237;\200\310\305\273\334}\016\272\023\250\2269\022\257Y9\307\227/\273.\307\r:l\235\361\2728i\222:\202,\037;\302wZ:zA\027\272wO\245\271\227N\010:\240\250\n;\322\177\3509\034\206\023\271\363\213\244\2722\266\205\272\226\322\032:\216\233`:\301\332\251\272\260\203U\272\361\220L\273\014\n\311\272T\350\253;d\331Y:\316:rs\324\272\017\223\201;\334\221\013\2723JL\271/\032\214\272~\377\023:\027\341y\271\312&\2548\240\244\r\273\035\273m\272\271\201\003:x\032)\271\036\341\227;\311\001-\272\374\376\261;,~?;\241\366\332\2732;\017\272h\022\003\273\324 \036\274\273\362\333;\016\363/\274]\300\272\273\375\315\t<\316 \365\273*\253\027\2737o\263;\362x\241:\311\303\225;rlO9\032W\206\270 
\364\3219g\330)\272\326$\275\272\350/\365:v\262A\272\373{\3229/\352\2039\262Q^\271X\360\022:\2074\005\273\227\305I\272o\321&\273!<\260:\332\005\273;\275\213#8jb):\313\311\205\272\263\340\374\2729\341V9\335\306\0168R +\271C\n\2659\225\036\341\270|\306$\271\0064#\270\205UL\270T\213\326\270\262\332\0138\362\347\021;Pc\303<\217\326\023\274\357\204\014\274d\3134\274\225\210[<\324\337\3159\336\3403;t\005K\274\026!\320\272<\366l\2726\310\203:\333\223\272\272 \007\033;~\022H\272@\257]52\200\223:\r\352\257\272<\2147:^lH\272\250b\301:|\263$\272\002\231K\273\347y\004:NZ\236\272v\265J\272)\240\243:\311.\317: K\262\271\002e\243:\324w\006<\217\032\200;\274V\211:\304\302b\273\254\r[\273\340\232\275\272\371\200\003\273\276\257\306\272 \260O8\322\017\275\272/\325\'\272-JD\2718\332\3459\241\2413;\357e\317\272B\324\206:\355\024\217\271\322\357(\2723\'\342\271Z\265\353\271\t\372v9\300f\020\272\300\221\2168\337\206\3649t1B9\200\305F:\346\251\007\272\031\313\212\272t\2773:B}\216\271\036^\000;\244\247n\2711\026\271\272J\304\331\272b\224E\270)\020r;\233=\252\272\245\343\0059\224\211j:\023\342\004\273\330\214\032\273y\022\247:\221\3145;~\312\t;\010|\025\273\213\257\362\272\205\220\014;\356\237\2308\3277\005:(c\034\273U.\355;\010\372\334\272\260\237\321\272?\374\364\2721\227\220\272\035\224\021;\004\313\255\273\023\014\216\273nr\272;\344\273C:b\204\026\273v\364c\273\302\345\233\273\2741%<\254\351\302\272\255\243|\273\001\254\027<\340\321g\273\002\220<\273\374g>;\017=\307\272\256-W\273x\035/;<\340\023\273\021z\\:,\023\275;$I\223\273\240\271\263\272\306DA:\202\326\327:<\247L\271[\226\2029\340*\210\272\244l\315\270\313,\2649)\354\21391\372M\272\355\363;\272\025-\2549\360\324x\273\273\016\227;\354\024\323;\352\230\243\273t\022<\272\316\312e\272\376\005p:\246\253`\273t\374I;\r7\242\272)\366\301:T!\025\273\226K\027<\010\356\3469?\307\340\272\203\267\234\272\202\271B\2734\311Z\272h\305\223\271.\204\366\272\2421\201\306 \273\341\341i;\231\330\323\273\374k\305\272\014^u\272\335&:\273\225\237\241;- 
\273.\266\2529\005\023\203\272\213\010\014\272\321\'\225\273\306\3433\273\204\002\016\273}\231R\272\344P>;\031x4<\352I\207;\255q\242\273)\370\225<\230\222\2629\316j\037\272uDl\274\030l~\272\323E8;\351\303t\273\325\371j\272\2111\205\272\241\356\203\2735 T<\240p\300:\266w\330\272\354I\037;\313~\334\272G\031\352\273E\337^\273\010\000\010;3\251\216;y\021\240\273\336=\037\273Lf\n\273\202\216\020\273\264m[8\014\264\252\273E\355\240\272\257(\227\273\255\270\225<9\242\2559\315Y\300\267\024\032\324\270\357\232\211\270\210,\n\271El\231\271-ez8b\026;9j\036\0238\002\360\"70\024\3509r\274\252;b\200\205\273f\221\271;\030\351B:\206\221\350\273B\035F:J\241\016:6\214\256\273\r\264M;/r[\273Mz\317\272\364\023\360:\277\347o:\000.\334:\261W\017;\336Ni:\203\331\204\273Q\034^;\240\000\372\272\007\313*\273\t\350\264\271\273\274\322P\273K)\177\2739\234W<\231I%\273}\270\0058\315\272\325\325f\273\204\273\373:\304\203C\273t\301~;:\324\027:;#\242;\354\254~:~\344\256\273\252\221\220\272a\032\223\273J\301\021\273\274q\274;\214\357\221\273\n+\365\273^o\035;\344\355Y<\334V<\273\rY\322\273kQ\035;\307\202\033\2724B\335;\323\335F\273NT\255:0=\341\2720\013\335\271\"\326\343:\004d/:\216\327*:oA\245\272\271\326\253\272\270\304\270\271<\0023:(\032\005;\335\243J:\366E\356:\246\017u\272F\037\014;\336\375Z\273\245y\022\272F\365\3439\374\177\314:\357q\200\273\031g\231;L\203-:.\316\223\273\224\0174\273\016~\327:\316\307H;\032\241\3719\265\264\213\272\232\034\367\267ru\016\273\200I9\271\301\"(:\002\316\036:\032.\201:bn[9\256\266\261\272+\245\3249\2535\213\272\322\003\360\272\276\333\316:\304|\2259\256\212\211\271\310\263\031;\026C\023;&\215\t\273\027\241Y\271\236#\203\271\332\333W\272Z\260\365\271\326\361H\272\205\330Q\271q\260\310\271\037u\212\271\233[\325\272r\305\315\272\361qB;\2467\244\271\266\310-:7\354X:\312\256\320\270J1\2239P\2574\272\202\211\313\272\020O\352\272\r\340\307:~[\t;,\021\370:\357\240b\272\344\217v\267\223\rh\272aA^\270\343>\3617\000\34587\010\210\3057\322\302\3477 
\232\360\267S\006\230\266\332qF7]\234\003\2677\221\022\267\223\3669<\024q\037<\377\366L\274\2430,\273\271\300M\274\343tB>\016\273\374&\024\267\372\311&:]\320\353\271\235\303\205:\004\025\353:\201\370\200\272\311\017[\272\010)y;\271Yf:\r\277\313::P\014:\023T!\273f\366o\273*\230l\272.\265\014;+\354\314\272c\r\254\271\270\352P\272\372\276\2528\235\322!:e`1;c]\221\272\307\233\010;O\366H\271\223\306\267\272\372\336\025\272\n\346\271\272\370\226\331\271)?r\271=\3466:j\001\0169\337\277\0259t\361Z\271I\026\202\271\336G\3719\306\351\252\270\003\365\213\271z\321\";\347?\213\271\020\236\211\271i\027\025:\004\213\323\271\225\\\312\271\r\327\272\272\270\036\31294\031\2159e\350|\272l\344\316\272\335\345];H\177_:\374=\327\272N\266v\273\315\314\025\273\244y\246;\t\237\361:\332\203e\271\3707\333\272\307\2543\271p\014\204\272\214\277\022\271.\033}\273)\312M9\306\230\357:\n}\324:\330e)\273\204+\331:\266p!;\311\373(\273!\375~\272\265^%\271\210\255\020<\325\2176\272\276\003\365\273\254\320\275\271\004K\254\272\324\\M9\016\210\224;\202 \227\272\355\346\221:\177\271\367\272:\010\022\273/L\301\270\205\317:\272Gq\n;\240\251Y;nmP\272m\003\2529a\236\364\271B\025+:>j\r;\033T\234\272\257\033\254\273,H\262;b\022\200:\221\014\254\271\317Y\216\272v\225l\272\303X\202<\231\344<:\261\314B\271\006S\010\273E\202\255\273\374\005\r\273\357!\360\272\377xH:Fr\333\272\213\344\210\273f\303\3777\243\237\021\273\254\330\026:\020\253\202;.\222\266:\272\303\026\273(\267K\273\362;i;\350\360&\273\231\352%:\276\321-\272C\340c:kF\340\272\223Dn;n[^\272\312\333\355:w\371\034\272\302\260\352\272v\034\307\272\304\243T:}A\220\270\206s3\271\016\335\2159v\203\322\271\327\032\202\271\226\030X:\233\260\037\272\007\245\237\267 
r09\260|\2059\276\004\276\273l\220\006<\315\214\334;]\214\342\273,\356\035\273e\245Q\273:\014R;\004\301\"\273\366\335\245;\315\254\035\273\261V.;N\230\036\273\373\2729<#\375\320\273\035\240\027\273\340\273\034\273+}?\273\333`,;\2214\265:\017\244\317\272\234)\355;\033`,\273x\202\353\273\226\013\3708N\306~\272m\371\320\271\346\311\'\273\321V\006;\026\337@\273\"\272\356;\036\202\026\273\376\004L\272^]\213:\255\035\260;Z2\363\272\365\01079?\305\377\272r\204\364:`\225\300\271\016\026\241\272\214N{;\341\366I\273\2715\r<\003`\352:\036\t\000\2736+\000\274VJ\277:\347\312\350:,|`\273\360m\234\272J\362\025<\\qw\272\313$C;d@\302\273\276\260R;D\nb:\2232B:\332\223\267\273w\003r\273=\226L\272z\371\025\273\014*\241:.|\353\272\"^\221\273\2112|\272\227\014\242:X+D\273\211\201P;$\036\340\272x\347\013\273E\232I;\222\267\234\211;\345\010B\273\372\016\232\2736{\004\272KY\3439\262\370\003\274\254M\234\272L\367\322;\276E\343:\303j\227;\302\\\343\273t\223DZ\272\0047\211;\007h\002\273\242\017F\271\257+\240:\352\254G\273\227\224\254\272\234?\272\272\352\242\2509\017m]:\353\330\265:(\222M\2727\003\265:r#\2359\274xR\267qnt\272\343\260\351\270\034\177\242:\340r\330\2711\n\004\272\030I+\271\032\260\017;6\253\027\272Q|\010;\016\350\352:%\006\204\273^k\237\273S\355<;\220\203\030;M\311\226\272H{+\272\315\027\227\273\341+\2449\224>.;\316l\002\274\263\225\260;\346w\002<\371\206\257;Q>\315\273\277\243\220\272wW\344\272tW+\273F\225};\313\253\201\272W\333\022;\036`\213:a\3451\273\334\227\202\267\250kn\271`\332\301\272\230,c:\327\211\210\273\303r\014;t\231\357:\237\233\252:W\354\206\272\000\322\0249x\231t\272\002\336\217\272\370\013i8;A\346:ps:\272\010f+\272\313\253q\273%\217YH\207\273/e\000:~\031\277:\304=\334\271\357P\021\273K\"\006\273T\253\3509}\'\002;8g\270:\324?y;\353\320/\273.V\371\272\375\375\211\272z(\275\272D\002\303:\001\234\3339\224\226_\273>\031\231:vdO\273\243E\213\273\276\217\250:\213\257\005;\333\246\234;GtT\272f\274\376:\205\350\3339\304\376\204\272s)\026\272\016\322\360\272\023vU\272\201
!5;\361CP9\370/$\272\021O\210:o\217\3509\344\020Z\272\274\0276:\312\246\270\272\360\307\020\272Q\245\232:M\222\2778]\177{:+6E\272M=R:\277\322>\271^o\001\270k\2026\266\031\236\0108\344\356\002\267\211\352:\267$i\1777\212\253\320\266\230\231\2367\261\0255\2666\204\260\266\033\261#<\004\014.;%\014h\274\234\323\210\272\254\314\270\273\267\332\010< [\305\272jn\3369\0269\341\273:\245\377;" - } - } - } -} -node { - name: "ip1/weights/read" - op: "Identity" - input: "ip1/weights" - attr { - key: "T" - value { - type: DT_FLOAT - } - } - attr { - key: "_class" - value { - list { - s: "loc:@ip1/weights" - } - } - } -} -node { - name: "ip1/biases" - op: "Const" - attr { - key: "dtype" - value { - type: DT_FLOAT - } - } - attr { - key: "value" - value { - tensor { - dtype: DT_FLOAT - tensor_shape { - dim { - size: 10 - } - } - tensor_content: "w\211-\274\n\017\206\277\367g\016?\244\245\237>\262.\273?n\331\344\275\035n\371=\202\233M\276\356\356\313\276\337\370.\277" - } - } - } -} -node { - name: "ip1/biases/read" - op: "Identity" - input: "ip1/biases" - attr { - key: "T" - value { - type: DT_FLOAT - } - } - attr { - key: "_class" - value { - list { - s: "loc:@ip1/biases" - } - } - } -} -node { - name: "ip1/ip1/MatMul" - op: "MatMul" - input: "ip1/Reshape" - input: "ip1/weights/read" - attr { - key: "T" - value { - type: DT_FLOAT - } - } - attr { - key: "transpose_a" - value { - b: false - } - } - attr { - key: "transpose_b" - value { - b: false - } - } -} -node { - name: "ip1/ip1" - op: "BiasAdd" - input: "ip1/ip1/MatMul" - input: "ip1/biases/read" - attr { - key: "T" - value { - type: DT_FLOAT - } - } - attr { - key: "data_format" - value { - s: "NHWC" - } - } -} -node { - name: "prob" - op: "Softmax" - input: "ip1/ip1" - attr { - key: "T" - value { - type: DT_FLOAT - } - } -} -library { -} diff --git a/tests/TfInceptionV3-Armnn/TfInceptionV3-Armnn.cpp b/tests/TfInceptionV3-Armnn/TfInceptionV3-Armnn.cpp new file mode 100644 index 0000000..94878ae --- /dev/null +++ 
b/tests/TfInceptionV3-Armnn/TfInceptionV3-Armnn.cpp @@ -0,0 +1,23 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// +#include "../InferenceTest.hpp" +#include "../MobileNetDatabase.hpp" +#include "armnnTfParser/ITfParser.hpp" + +int main(int argc, char* argv[]) +{ + std::vector imageSet = + { + { "Dog.jpg", 208 }, + { "Cat.jpg", 283 }, + { "shark.jpg", 3 }, + }; + armnn::TensorShape inputTensorShape({ 1, 299, 299, 3 }); + return armnn::test::ClassifierInferenceTestMain( + argc, argv, "inception_v3_2016_08_28_frozen_transformed.pb", true, + "input", "InceptionV3/Predictions/Reshape_1", { 0, 1, 2, }, + [&imageSet](const char* dataDir) { return MobileNetDatabase(dataDir, 299, 299, imageSet); }, + &inputTensorShape); +} diff --git a/tests/TfInceptionV3-Armnn/Validation.txt b/tests/TfInceptionV3-Armnn/Validation.txt new file mode 100644 index 0000000..81b64dd --- /dev/null +++ b/tests/TfInceptionV3-Armnn/Validation.txt @@ -0,0 +1,201 @@ +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 +208 +283 +3 diff --git a/tests/TfMnist-Armnn/TfMnist-Armnn.cpp b/tests/TfMnist-Armnn/TfMnist-Armnn.cpp new file 
mode 100644 index 0000000..5625f4c --- /dev/null +++ b/tests/TfMnist-Armnn/TfMnist-Armnn.cpp @@ -0,0 +1,17 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// +#include "../InferenceTest.hpp" +#include "../MnistDatabase.hpp" +#include "armnnTfParser/ITfParser.hpp" + +int main(int argc, char* argv[]) +{ + armnn::TensorShape inputTensorShape({ 1, 784, 1, 1 }); + return armnn::test::ClassifierInferenceTestMain( + argc, argv, "simple_mnist_tf.prototxt", false, + "Placeholder", "Softmax", { 0, 1, 2, 3, 4 }, + [](const char* dataDir) { return MnistDatabase(dataDir, true); }, + &inputTensorShape); +} diff --git a/tests/TfMnist-Armnn/simple_mnist_tf.prototxt b/tests/TfMnist-Armnn/simple_mnist_tf.prototxt deleted file mode 100644 index f8573ec..0000000 --- a/tests/TfMnist-Armnn/simple_mnist_tf.prototxt +++ /dev/null @@ -1,117 +0,0 @@ -node { - name: "Placeholder" - op: "Placeholder" - attr { - key: "dtype" - value { - type: DT_FLOAT - } - } - attr { - key: "shape" - value { - shape { - } - } - } -} -node { - name: "Variable" - op: "Const" - attr { - key: "dtype" - value { - type: DT_FLOAT - } - } - attr { - key: "value" - value { - tensor { - dtype: DT_FLOAT - tensor_shape { - dim { - size: 784 - } - dim { - size: 10 - } - } - tensor_content: 
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\222\n\007\222\000\000\000\000\0102\346\276\000
\000\000\000\0102\346>\000\000\000\000\000\000\000\000\316\232\373\216\000\000\000\000\000\000\000\000\320\330\223\222\000\000\000\0009\006|\277\000\000\000\0009\006|?\000\000\000\000\000\000\000\000]\273\211\217\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\250H\311\210\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000p0\206\206\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\310R\027\225\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000/0\007\275\000\000\000\000.0\007=\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000{\226\024\246\000\000\000\000\231\222{\300\000\000\000\000\232\222{@\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\311U\277\273\230\350\276;\000\000\000\000\022\002\000\301\000\000\000\000I+\001A\225\224\224\275\332\221\000\233\000\000\000\000\000\000\000\000\220~\221\276~]\221>\000\000\000\000\224\365\034\301\000\000\000\000\215\346$A\377\375\375\276f\304\033\235\016\026\340\260\314z\243\242\2447a\276E\373`>\000\000\000\000v\324,\300f\254\277\221\004\2404@\372\370\370\275\037+\016\235\177\345\002\262\257\264\267\254\366<\333\275M\263\341\276\000\000\000\000\
236\013\370\277\'$\351\222\"\026\037@\341q\331\254\t\\\355\231 \233\257\261|\303@@{\233O\244\361\315f\300\000\000\000\000R:\263\300\362\241\327\222\273\325\331@7\000\360\276\214\004\'\211\235\305\022\2763\201\276@\031\022U\244\264\371\222\277\333u\213\232\216Vo\301{qP\277\363;^A\027#\027\300A\365J\222ln\017\277\325\236\034@S6\244\244\363\023\254=6 \206\230B8\217\3008\370\t\277$\020I@\010*\357\276]\224z\275\224\027\005\276\360\270~\230\236\316\200?\235\204\014@\250\210\357\2710\265\231?\021\215\030\260\222\256\200\300\212#9\203\t\017\303\276\254\230(\244KKK@\022\327\327?^\037\222@eJ\020\272+\267%\301\342\340`\276\252#\003@\265\2644\276\275\255\307\271\364O9\277\356\352\nAA[J\277f1\263@y`\255\270\373\2074\301\237\236\236\276\301\366\017?\370\366\366\276\301\236\305\266\234B\375\277&$\204@Qy\000\277\373\227\352\276\202\347\365\211\345H\217\300\006\205\010\275Ew2@\354\352j\276\366\311\307\233]\327\233\277\322G#\227:\334N\275=\024F\300\000\000\000\000C\240\n\277\240\303\310\276\3072\235@\000\000\000\000\204\014+\233VUU\277|\204\273\230\000\000\000\000\215r\330\277\000\000\000\000\026*m\300+\200\203\276\305\375\270@\000\000\000\000jOl\226\203\202\002\276~)\027\231\000\000\000\000f\373\216\276\000\000\000\000\036H_\300\207\030\356\264\213\'q@\000\000\000\000\222<4\225\013\274\031\230\373\005z\230\000\000\000\000Q\254\241\265\000\000\000\000\342Gb\300\000\350D\264\346Gb@\000\000\000\000\000\000\000\000*\000E\226\000\000\000\000\000\000\000\000F:\016\240\000\000\000\000\226\233\217\300\000\000\000\000\226\233\217@\000\000\000\000\000\000\000\000\035\306.\223\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\210y!\300\000\000\000\000\210y!@\000\000\000\000\000\000\000\000H\367c\221\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\0
00\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000A\n,?\000\000\000\000\000\000\000\000\000\000\000\000\201 \277\231\352h\275\206:)\000\000\000\000\212\203\213\267\000\000\000\000a\335\\\276\241\240\240\274\205\204\2137\335\337\032\256\000\000\000\000\247%\202\250f\254\225\255\000\000\000\000c\310E\26617\005\263T\007\365\275\000\000\000\000\344\010\365=\000\000\000\000\000\000\000\000\000\000\000\000\270pe\300\000\000\000\000\227\206\0264&.-\264\311*o\300\000\000\000\000\275M\352@\000\000\000\000\000F\355\217\000\000\000\000\224\240I\300R2\347\275\222}\250\276r\004\024\264\260\355H\301<\332L\210\0309\202A\225\224\224\275\351\335`\275\030\333\232\210H\266\310\277\177U\010\300\224p`@@H\244\277\305\034u\301\277\332k\2427\262\215A\376\374\374\276\374\325\330\276?ed\261[>f\2777\013\202\300\335\361TA\202]\000\300\320\271G\301%\300\277\276#\005\350@\220\217\217\276\302\203\205\276\306\304D\276\342=3\277\236\036\023\300\210.\225@\250;\032\277\245`\245\301\004\030\205\300\207\330\305A\342\026\302\2758\306\330\275b`B\277#Hi\300\024K[\300\277E6A\034Y\206@\354z\220\301F=\"\3017\177\265A\264}{\277y\253\256\277\313\267(\277\322\257L\300\215\215\r\277\253R\001A\370p\375@i\372\275\301\323q\355\300+\003\317A\n\301
3\300]\332L\300\201\202s\277\350\330\245\300\200\177\211A\301\206V\300\t\213\005@\337l+\301Q\315\300\300Z\360/A\3378\020\277a\364\251\300\316\316\320\300C\257NA+j\216\277\375]\"\300>\034\303\277\361wK\301u\320\240A\365QgA\230\372}\300\345oM\301\251\227\254\277\366h@@\200(\345\277\203\307m\300\260\345\227\277\363l\010\301\323K\211A9\034\217@\004C\321\277+j\202\301\374\346\232@\300\325\022Au\272#\277\330\243\340\300H\276\303\277\321\203\315\300\t\243\010Aws\324\300\2121\202\300\351\337\216\300\236\';A\230\025\017A\204~:\276@2\274\300U\306\302\277m6P\300\214\275\242?\"\0032\277\365\016\374\300n\233\014\301\341\377IA\272\231\032Ae\375\026\275uj\205\2777\342\356\277CE\t\300\033&\364@\034e\372\300\033\270E\300\266\313\006\301\220!3@J\306\202A@r%\275\263h\220\3003\n=\277\247G\017\300\305W\027?i\211\361?\346!\204\276tp\001\301|l\334\300\252\207\202A\373_F\275\305|\234\277\036`F\275\013\356\230\277\240\222\342\276y\203\004@s\"\006\275\237\332\032\301\233\262\013\301\201\276\225A\023\337\253\274#\305*\277\023\337\253\274\232\323\263\276\213\375Z\256Y2\244\277\017!\377\265\025:\022\301\267e\235\300&\024{A3\216\341\235\312c\021\272uJ\273\256\214\316\212\275\244\014\203\255\235\253*=\305\226\\\263\320\230\244\300MK^\300\343\"\013A\000\000\000\000\373\266X\275zF\245\222\367\260\016\233\037z\351\203\276\n\200@\000\000\000\000\363Z]\277\032\017D\277\021\200\357\277\000\000\000\000{\327\377\276\000\000\000\000\000\000\000\000\000\000\000\000\215\263%@\000\000\000\000y\346\t\215\233\354B\236 
\002\021\300\000\000\000\000j\213\245\276\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\372y\2217K\210\252\236l\233\364\204\000\000\000\000\000\000\000\000\302\256\217\267\000\000\000\000\322\345g\264\000\000\000\000]\261\353?\215\214\2126\037\312\241\246\344\3138\237\3038\370\277\371\332\300\274\356\004\206>B\236\242\257\365\207\017\276$\2721\271\3254\024\277\000\000\000\000\353\311>\276\023\017\017\277\367\232\032\277\250\376\376?\216_\203\275\003\323\330\256\326\257\247\203\26646\251/\031\262\300\217\322\010\277\306\303-\277\032\205\'A``7\277\372\364}\300R\024\252?\177*1\2368\314\253\276l\330Q\214OX\027\301\372\361\265\262Z\206\215?\207M\367@o8%\3019b\006\300^)_AW\325\273\274\244\233_\277V\'h\241\207\177$\301u\263\246\277\221\215\237@S\264BAm\274R\301\\\022\323\277u\223UA\277\014<\277\217\303V\300\234\240 
\274{\377\336\277\265\225c\300+\261\025A\325P\210A:\037\301\301\3674\233\276\331x;A\013\272\201\300t\373h\300\372$-\277\223}\275\300\335w\222\300\000\247\335A\007\234\026@\253\242\374\301u\211(?\367\020xA\375eY\300\030\277v?\013\010\333\277\177\352\232\300k\022\016\301\221\246JA\211\266DA\343\031\255\301\010\316\240@J\2459A\277z\321\300y{\032@R=\006\300X\362U?\331d\216\301\"\303\230A@;\334A@w\272\301\304\320\250\300K\\\217A\2103;\301\265e\203\300C5L\300p\326B?v?\204@\256\327%A[\243\004B\345V\032\302\237\335\351@\305Q\300@\0043\r\301\013\317\000\301\t\330\310\300\007\231pA\020/\302@|\267\317A\242z\271@E}B\302\212\255 \277t[\212A*\177\352\300\357A\255\277\305\253C\301\214vvA(,\017\301\001\3358Apw\356AA\031\341\301\262\261\207?\260\007=A\001D\002\301\304\013\005\301\030\311\200\301\221JE@\263\016%@E\321\035AL~ZA\304C\262\301\022\3107Ao\364aA\373\366\365\300\037\362F\301W-F\301]\024\rA\373V\nA#\224\001A\030x\212A\350\302\252\301j(\256\277\342j\036@\251\205\261\300\200\335\021\301)>\377\300\312a\t@$\2065\300\335\246\216@\231\332JA\207\t{\301\245C\007A_\264\353\277w\352\017\300\200\006R\274\203r\243\300\251&\345\300\2167*\301k\306\031\301\337?\224AR\r\210\301<8,AKc\267@:<\003\300+\356.Ak1\002?E\"\372\300\341\276\250\3004\317\246\300oW\217A\244\370\263\300\243T\274@\347\201!A_\024a\276\241\323\r\301\026\352b\277\262\'B?\302B 
\3010\362-\301\241u\313A\254\363\321\300\010\2362AV<\250>G\3600\300\356(\313\300\314s\217\277\0238\022\301\364a\352\300\210\252\304\277\035\260\024\301o0\005\301\020\336\200A\271\302YA\246}\004\300\354\251\006AhM\345\276\200?\312\300>\013\303\300\212\234\304\300\"\375\204\301W\016\323>\210$\371@\315\221WA\364\370x\276\222\t`A<\025}\2763,\337\276\302\201\337\300\200\322\016\301l\366\035\301\177Z#@\234\032\036\301\261p\247A\000\000\000\000\362\021NA\206\205\205\276\307\251I=\273j\353\300T\272m>\"\027W\300\343\321\351\277\234\036\001@\376\217\005A\000\000\000\000J\351\362?\375C\005\231\223\222\222>Z\2420\300R\001\017@\261O\202\2746\014V\267\234#\337@B\264\352\300\000\000\000\000\\\323\035?\000\000\000\000\333M\375\243\352\014T\300\000\000\000\000\000\000\000\000\000\000\000\000b \202?@\251\"@\000\000\000\000x\310z\276\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000dcy8\242\240\240\274\204QK>\000\000\000\000\250\246&\276\026Zv\270\000\000\000\000\316\267\204\274\000\000\000\000\265\304\304\275\367ur7_\207\001\277\212\001\332?\217\216\016\276\273t\371\276\022\211q\276\363\361\274\211\271^t\276\370!B\271\273\312\036\277\"\"\242?\370\242Y\300k\271\207@\031\302\375?2Nz\300\214<\213?n\316\303\274\017_\037\277?bV\242\307%\032\301\325M\314@\3575D@\302\266\225A\356\214\004@2yz\301R\343\311@\035{\327\300\206\264\220\300\311\351\251\2472\263(\301D8\341@h~\275A\330\271AA\254\351\257\300e\246Z\301\364\324B@[\253\336\300\355c\023\301\005L\223\246\361/\310\277z\320\340?mg0A\311\207%A\032\032\034\300g.$@\375\023u\300\010q\340\300
H\302\013\301\263\"\007\3003\360\026@\276\024\010A\201\330,A\240\247\260A\263o\201\301+\213\223\301\212(\270@p\000\032\301\337\331\354\276\340_\231\300\340\232NA\0306\375@DZ\241A\311\207QAB\270\222\301\002/O\301\224\037\254\300\016\376~\301\234T\006A\231\031\036\301\207>\200@\022\335\'A\231\357\035A\375\220\221AE\202p\301\353\323\216\300\221\n\n\300\"(\271\301\267tGA`\303#\301}I\223A\302\353\026\300\360\320\370@\031\017\303A\310\353;\301\366\007(A\024?\227\300m\322\304\301\320\374\027\300\2037t\301)\205\247\3004\nA\276\251\035tA\263\256\025A\267\260\315\300\316\222\216?.\365\201\300\272\366P\301# \232Ad\'\200\301A\204K@\263\031/A\000zX\274q\370\226A\'\246K@&b\223\301@\376\026@t\311.\301O\251\033\300U\315\327\300\342O\234\300n\223)A\035\301\200A2\013\"A<\270-\301\261\337#\300\242\2260\300\034\324\222\301S\356\202A\322W\\\301q\037\377@\327D$\300L-\033@@\023\013A\374\026L\301-\030\233A\356pg\277\214\364\201\301Qj\347@]\313S\301U\025*\301\330\224\247@6\226\365@V\231PA\313\217\004@2\244#\277B\366\000A>\007k\301\2231IA\376\221\265\301Y 
y@\262\263\225@F&\307@\332\261o\300\356\305\024?\352Q\234@aj\251@2d0\301P\373\002@\231MM\301\037g\311?\037\3353\301\2333\222@|\301\021A\350\273\361\300h\010\222A\321I\265\300\257\327\354\300\314F\257@\275\341\343\300c\373\265\276\246\017\010AL\250r\300\014\371\246\300\316Z\023A\330U\205A\272\221\027\300J\3518\300\227\t\243\300\027\234j\301X\"\311@pk\300\000\000\000\000\344\"#@\222eJ\274\000\000\000\000\n\203\354\277\303\375\374\275\000\000\000\000\031\356\354\275\241\255\366\27676\026@\000\000\000\000\t\006c>\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\303D\211\230\215\214\n7li\"\236.\216\207=\021\302N\267k\231Y\276\037w7>\000\000\000\000\356\216\006\275\000\000\000\000_\013\216\277\323\363\252?\303\373\034\277\241\233\014A\346v\014?\356Z5\300\\\213W\300\006+3\277\353\220\321\277\\@\322\276\250\234\271\277\220\0208@a\014\363?\302\304\206A\016\375\334?^\241\212\301\222tp@z5p\3007\001h\3008\177r\277\224[X\301\220\010rA{\".@\\S\265A\227\025\\@~D}\301\362\355\000\301\177;\025A\310XZ\301TH\016\3000\'\007\276\253/QA\222\002L@\267\341\202A\001S\271\3007h\177\301\3340\202\300\337\006\026A#\032\025\301(3\327\300\376D\002\276\203Kh\300j\334\203\277\007\266\200A\222p\017\301\2348\370@\001T(\301,J\003Ak@\257?\215^\022\3010\017\214@b9p\301U\022\017A\022\217\306?\036gq\300#\324\216\300\364\360\264@\373(\232A\002\223\024\301\312K\350\300\304\030\013@\327T\233\301\202\021\341\300\314\324\204@\272a\326?\316\200\340@IC\022@\033\276\200AI\232\361@\364\344g\301/!\232A\000Y\371\300\0045H@\tb4@\352\301\247\300\274\374\002A\351o\033Ab\223K\300d\270K\3015\252b\301\351\214\357@&\034\210\301\341Y3A\254\317!>P\363=\276AQI=\237\034\302\301\275N\332\300\346\260oA\331nfA\2132\215\300R\330P\301.\177\204@]q\013
\300\344\013cAV\203\036\301\031\207\023\301\336\314\351@\317v\021A5_\201@\311D,A\256\275\336\300\304\032s@s\236\177@\217\243~\3014\252\225\300\332\203:\301\340\306\007A\226\211\244@>\271\340@;N\316@/\036\372\300\363Y7A\325)\337?\312\365a?U\222\035\301\032\350\024AC\327\305\300{\372 A\224m\200\301_\266\262\277\003\237\256@TR\336@w\023\205@M\006\016Ah\353U\300|\326y\301\027_m\301_\277e@|\035\305@\355mP@\"\030\002\301L\014\030A\352\350PA1 \027?\304\026\266?\337\363\304\300\3112\277=;\301\316@!\311\240\301m-GA\206\210\017\301R>\351>\032\272\317\300?\263\334\277\222\346#A\202(\273\276eo\342\300\213QQA\257\2429\301o)WA|\312\227\300d7\245@\235\337\325\300\234];A6b\245\277\357m\007A\231\314\021\301k\005a\301\251\3658\300\233\225\002\300\361@\275\300\245\340\205\300S\222\030Aof\001A\007v\034?\005\220AA>\242\252\301nJ\273@_\353-\300\352T3\301\371\275\024@\2240\326@\314\342\343@S\325\316?\005\007\177A\277\242\227\277Em\354\301y\251BA\345\201z\300\266\303y\277\007/\037A\241G\036A\330m2\300\320\263o\300\31160@\334C\265@\2064[\301\206|\206?\244\234\001\301\213S\353@\316\375/A\246\210\231@6\t\331@\203=\320@\315\020hA)Ig\301n\366\025\301\324\227\225\3006\212\263\301:tR\301\246\233(\300o\3313\300\247QG\3014\\\245AU3\215ALy\340A\334\233\304\300qQb\301>\217p\301\021\306C\301T|~\300\024\032\210\301&\200\217\300\321\366\342\277\034\255\306A!\004nA\017\273\241\277\322\010\272@\004\346\230\300\240\371\306\300\207\026\224\301G\304\023\300\200\227\361\277PcL\300\243\240\177A\001\357\256?\016\301]\277\030\221\216A\272w\013\300\234{\000\300&\334\355\300k\022\036\2777\\!\276\221\356d\300\236\221\270@\006\"\324@\334\225\353\276\313}\007@\003\254\205\276U\315\217\277z\221.\300M\273:\276\000\000\000\000\223\337\246\275(WU?\300\275]@\354nP\272\272\272:\276\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\350\356\210\242$\250(=\000\000\000\00
0\356H\254\206\000\000\000\000\000\000\000\000K\335\373\265-\246(\275\330\013\274\276\207V \300\300Mi\277\214\267\"\276@W4\256V\266d?\021v\002<\323\346j@nc\004\277v\343\344\275R\301p\277\360e\036@ j\277\277}\003\005A\263\346b\300\337\324:\3011\354\025\300\234o\200A#c\247\300&=\316\277}\342c\300\331\360j@;1$AX\217?@d\341*@\034h#\301\370\362\210@\310\230\343@\014\n(\301}\311\326\300\336w^\300\235\275a@<4&@n\335\021@\377VAA8(\276\277\313(\212\300\204\360\203A\321\334\001\301\270\253\234\301\016?[@5K\352?\205\312\177@^\r\346@\035/\033\300\230B\325\277\371Jq\301\216k\302A\206\277\010\301\004kP\301\\X\202@,E\014\301\r\236\332@\001i\320\277\023\263\027@\214\307.\300O_\235@\371R}A\252\300\330\300\376\005b\301\224\222A\300\\W\016\301`\331\262\277\022\201\021A\324ca\300\270\004\001\301\200\244z\277B\322\251A\214\277\215\300\250x\032\274\000\373\305\301\313\327\227\301c\307UAdm\030A\350\270hA\360m\360\300\032\312\364\300\365\377\307@K,\237?\204n_Ag\273\260\3006x\007\300\217B\220A\303\322\241A\277D\025\301\357JD\276\315\363h\301\217cWA!\230J\301\022\341\352\300\033\327(\277N\227\254@\036\313&\300#}\004Al\266\225\301\202\345\'\301Oi\001\301\362\034\273Aj4\306\300\001\023\033A\232\202\223\3017\375\266\300{t\304@O@9\300\021\016\201\300Q\\H@\n\t\321\300\376\273kAa}FA\230\245\232?o\037rA\322\213\006\301\026r[\300\032\253>AY}<\301\322Y\207\300\312Lo\301\002(e@>\3128\300\272\375pA$\242nA\265n\327\300\3205Y\300\"b\"\300\260\010\236\301YO\364@\247,N\301\305z\315?\341\323=@\221v\221A\355\317\212\3005;*\301\210\2553Av\006d\300^{\240\301\020\376W\300-\263[\300\300\375\010A\343\021uA\267\312$A~%b@\220\316\237\300\306~!A=/2?1\325\242\301\002\202q\277\246\223\356\300\262\216\353?\336\237`\277k\260\223A\314/\373>x\331\246\301\017\177w\300Ta\332\300\254\031\312\3007\232>@\010z\016\300<\212,Ao\301\030A\203Q\202AfZqA>^c\277\"gF@\177\002\205A=x\301\301\315\235\337\277M\315\300\301k0\326@\360L\373\300\021%\212AK\177\n\301\273%\321\300\271\206\014\301\033\030T\301\306,v>\250\217\222A\304\353\340\300
nu\204\300?c\322A\204\214b@\211\204\341@E\264\347\300\247\243\t\301\n~\020A\035H\207\301_M\341@\322\357\020\300\2066T\277\332\226\277@@\266\327@XtzA\"i\013\300\023\232\276\300\336\031\340@/\260\352\300\357\021.\301liv\2761L\213@(\217\340>\322\n\\\277\221\356\323A\214\213\t\300\216\022\031\301\247 H@A:\357\277g\034\023A\335@\035\300@E\333\300\020\342X\300\243yH\301\332\005\274\277q\255\316@C\274\247\3011\007\250\300C\202\251A\256i,?\005t^AK\337\243\301@(\222A\335*E\301K\030\240\301^D\240@ml\204\301\014\350]@d\016\353?\363\020\005Be\270MA\357w\237\300/\333;\300\007>?\301g\206t\301\257\255\037\301\333\010\017\300\030m\235\300\332\262$?{5\225AT\201\200?\036i\013\300\033\203\220A\332\025q\300\366t\377\300\336\216w\300H\316v\277NA)\277|\303\364@>\362\306@\234$\240\277(\251\035\277\206\231I@\214\027\326\277h($\300\000\000\000\000\332\273\200\213\3569\333\241\250\357\353\275\246\226\205@\302\352\352\276\323[Z\271\206\023\n\277\377\375\375\276\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000*\027\005\277\227\016\260\244\016\334t?w\307\233\275\357&\346\235\000\000\000\000\251\371i?\225\211\204\276i\000\202\277\312\310H\275\210,\321\300\303\032\217@\272\342\203\300\275\310H\275M \212>\347\337\324\250\014Q\352@\361\313\002\277\223\017L\277\310\3678\277\3236\211\2776\242\027@X\3354?\207\215\250\277P\253\031\301\220\260u\300)\252\275A\265\023\255\300\226B\232\300\0243\325\300\306\001M@\311\252\273A\010F ANC\376@x\027\356\301EE\205@\265i\327@F\350c\300\226\222x\301\352\363Q\300G\217\006\301\016\303\243@f\002\331A\266\r\272A\003c6\301\233\210\257\275\0378\257\300\276S\276? 
\261\342\301\013\352\007A\207,\026\301G\334\211A&\201&A\004\024\270@\227\330u\300\257\003\256\301\336%-An\362;\301\200n\277\300C\366\n@\272\032\327\300Sd\026\301\330\306\206=T\326\"A\264\236\021A<\271\023A\020\004\001Aa\031#\301U\362G\301\363\020Z\301&\220\263\301AykAk\310\366@\222\260\240\300PLHA\034\266\262\300\010\307R\300\243\215RA\016\370\356?b,\263@,Q|\277l=\340\300\032}\321@\266\210\310@9\014x=P\247\310\300\265\247\023A\240\001\021\301\007\377\213\300)\355\360\277\305\330\035A3\364\001\301\246\210\232\300\371U\177\301W\013\035AZn\347\300\232\220\207A\310^\026A\352\000\000\301\300\364\321@9\'\020\301\370Z\225@\301\242\266\300\230|\333@p\263\266\275\375\253N@\016 \325@\013R\r\301g\322\212\300\247\033\001\301\264\007\024A>\215\343\300\024\307?A\001\270\225\301X\341\346@\014\255(?`L]@\2404$\301\302i9A0a\360@P\006\216\301\3660\216A$%\375\300\316\345\037\301U\267\027\301xl}\301\036\013]A\326\262\344\277\314P\275A\020\274\332?\364\264\225\300\337\310\nA\250\225\236A\016\262[\301\337\032WA\020\234S@\251z_\301\227\243\305\301\215\001\"A;\222\"\301\210\032\204\301\253\334y\300\235\350o?\250,\223\300_\346U\301\317Q\253\276\226\213PA\343?pA\362%\237A,\345\330\300\3207\217=\021\234\007A\210\327\253\275\333\306\034\301\036\253\333@gV}\301pS\307@\320p\234\300/\346{A\301 \213A\243/\006\301f\021\216\301\261&\307@\013\333\353@\307\200\241@\251\227\222\301\251n\036A\3210\326\300\307P\247@\234\027\206A\34630\301d\207q@\211\332\213\301\362\035.\301C\324\242@\216e&\301\262wmA\2359\003A\364\000\216?\032j\334@\237<\204>\221\017\353\300\020Q\373@s\264\336@\2732\204\277M\003\252\277\332F!\300\\p\021\301T\220$\277\031\357-\300\226\014\026@\005\001\032\301\263\010\254@\245WM\300\374\355w\300\247\225\202\301\021\320cAx\341\032=N\007\\A\365&\003\301\304:n\300\033\311\n@\374\310}\301\243\002\251@)E\304@+E\035\301\2018\'A\330\354\206A\353\220R\300\2461\271\300\322\374C\301\310Q\222A\332\037\234\301h8\nA\005\314\177A 
\373k\301\232\205\270\276\220\024NA\212\236H\300\224\327\030A%\346\205\300\373\241\013\302<-\370\300\305\321\221A/\025\342@\325f\227\300\273g7\277\307\277\240A^&)\300\234\332\353\2771\021\265\301\354\302\317\301!\032\321\300\tW%A\263\250\254Au \227A\370\227\375\275}\214yAe\312\024\301\261|\222\301\2447t\301zM\233\300?#\213\300\t\345\362?\207\306+B\334\277\201\300\n\316\366\277\021\307\246@w\267\245\277>\346!\301j\353\r\277=\254O\277Z\031\254\276\2747\025A~\341\246@)\277\036\300\232\323\202\276\t[\223?\347g\221\277\343j\316\276j\230\300X\345\365?\t+\022\300\367_Q@m\035\217\276\326\324\324\275O\363\321@\370e\213\277\n:W\3008\022\251?\032 w\275\331=\214\277\024rw??k`@D\241.\301\3172B\300=\331\301AI\267\026?G%x\301r\014\347\300\236q\330\275\314SlA1]\rA\262\202\355@D\321\037\302J\220\305@rC\214A\365\"\341@\305\326g\301\034\233\371\300\344\t\263\300\324>\250\300\003\n\234@\220\256\261A.\347W\301=\325C\300N.\007A\360\030\331@\030\235\343\300>\334\365\277\341\252\035\301\013\204\361\300\226\3128\300z\324\242@w8H=,\3354\301\351\236\017A\351V\177Ae@\\@7!K\300\337\340\234@\225\031\'?9j\010\300Yn\275@\rr_\301{\210\003\301/\235\351AIf)A0.\276\301\026\236\006@\233\362k\3019v\017@\344\313\\=\363e\332\3008\026m@\314\322[\301_\261FA\034\276\225\300P\277\233A\364\275\217A\036|\217\301\321\355\004\301rG\241\300\303\027\324@Y`7@\214D\242>B\252d@A 
y@\256j}\300\213\005\033\301\200&\265\300\325@RA4\322W@:\000E\301\376\230T?\211\3210\300\366gI?e\233vAf\215G\300T\214\317\300\355)`\301\277\244.?\342\217\232@\252\275\331@\310&N@\326o\236\300lR\223A\344\r1\300\204\247\267\300v0-A\316\272\202@k\310:\276\214|\243@\207XI\301\024\265l\277\246\272\271\300\323\376\tA\326\372\035A\243,\230\301\010\022\220\300A\370\t>]z\035\300\315\\5A\305\317u\3012\352\003\277\330?_\300\274\204\375\300\325\345\tA\237\235bA\266\365~A\254\003\335\2754\206\262=\334\010HA\223\364\230\301\351\363\215\300/i\246\301\321\242FA\346C|\301\242V\232A1\223+Ap\256\354\276t\241\376\277\\J\225@\362\035\357\277\237\360\270\300/\326\235\301Y\315FA\271\\\001\301?\231!A<\267\364A\300U\202\301??\376\300\000tr\300k\375\235\301\223W\244@E\3674\301\310\257\331@\352\004\336@\026\346\031A\3712 A\301\332\032A(\026\346@\344\007\215\277*\371\020\301\332\213\030\300st\343?\204`\370\2760\353\022\301\3047\316\300\356]\370>\273F>\301#\003_\301\311{\307A@\333\323@ \257t\300\002]\324\301\207\334\235A$\256\251?\316dF@r\321\362\300Z\277\261?\302\273}A\231t\350?F;,\301\360\264v@%\005{\301\317\031\314\300\303\024\204A\270\322}?\203\342\235A/\206\025\301*\345e@\357L\226@\320\0230\301\320F`A)LB\301\234\245\346@\027\376\241\300\026\031;\301\272\233.A\371BL\301\244\021!\301\351Y\007@\232\241\210@\273\305\031\300%\030\217\300\376\300\325\300B\352\345?.Y\212A-c,A\330\234\t\301n.\254\300\372\231\335As?\216\3014\222\241\300\252&\263\301\250\324\203A\252\0234A\376\024\340\300[\262}A\346e\245\301\216\022\000A\322\354\372\301\255\241\034A\342\211\347AB\234\326\301)@ 
Ac\032:A\254\356\253\300\351\333\030A\350\317\364\3018f\345\301\217\323\343\301\370\247\311@\025\005SB\325**\301\024g\370\300(\266\010B\341QW@\246\230\317\301\202\263V\301\301\365\027\301\327\202\257\300>\013sA\031\272-B0\0304\301\343\350\004\301\341\334xA\347{\206\276\340\030.\301\002\371\313\276\343<\221\277\240z\006\277\35060\300\260K\230A<\037\032\301>\016\231\273\241\274\362@Rm\245\277\005\323\210\276X=\037\243\00179\273\274I\021\212\250\247\247\276\233n\352\276(\334\310\277W%\204\257\213\371\'@\214\312K\261\000\000\000\000\000\000\000\000\000\000\000\000\276\r\216\206\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\245@\001\221\000\000\000\000\000\000\000\000\274[\016\251\304\031\033\3006\214\014\277\316\314L\276\262\227\007\207\255\376\250@IVR\250\274\363\006\3008\374@?B\324E\300\326@C\300H\244\205@M\025>@\032\274\231\276\225\371.\277\330\215\nA\224:}\300\\G\257\300\022\253\235@\321\363\360\276x\320m@\006\223\016A\337\307\030\301T,i\301^T\203\300\272)\273A\300-\310\276\236\310=\301\310\251&\300\240 \306\300\242Y 
A\354R\000B\341\006\202\301\020\203\343\301\024\350bA\321P\200A\267\362l@\254N\264\301\007\033\241\300Nl\217\301\334\000\277A\211\250\rA\270\350I?@\311\\\301.\367D\301\360\037\020A\233Z\215@#B\010@\336;\262\300\207:\334\3008K\261\301\003\004\006Act\375\277\372ed\300\323i\014\30148_A\322\373\344@\315\244\233A\363k2\301\333\\4A\006\214\216\300\032\332\225@fm\204\300\245\327\215\277GYt?\272\t_@\0347\300>\200\n\250=!\005\005\301\3240f@J!\312\300C\374\350@\305\323\363\276\'\2178@\245\022:\301\014\014j@\327\312\007Aa\263T?\203)\312@\001\261[\301\212*\020A\245\016\006AF\010\311@\2245\210\301mY3\301\003m\350@x[\353@\355\002)\300V\265\177A\247W\346\300Ff*A\231\353\232\300\327\254\222\301p9\350@\035(k\301\216\307_\300\354\376\265@\327\335\017A\007\320\014?*\007\321\300\006\313|\301\353\236\203\301\252\276_@\313wJA\257?\242?n\266T@\334\251BA\262q\252@2\310\217\277Vy\022\301\372\032\246\300n\206\331\300\362\314\374\3002\355\023A\344\204\034\301\302\353\271A\320\"\010\300\222r\031A\334\354\267A\030\341\240\276V\262%\300:\300]\300\224J\304?\351U\341\300\rM\273\301\275{|@\017H#@o\251\271@\026\253\032\2775\335\005@4\260\247\3009^\010AU\335\250\301@S)\301\231\236\331?\363\334\345A.\022\t\301\376\277\241@d\357\234\301\017\271\233A&\237?A\333\322\230A\344\273j\301mw8\301xP\030\301\271\210\225A\224J\344\301D/jA\352v\256@\350LTA\233\266TA{\355|A\010\202\313@\345\360\247\301v\200\253\3015\330\271@$\240\025\301WV\003\301*\235\330@=(`\277\304\346\003\301\312\231F@XcG\300\322\340\\\300n\333\366\3013!\317Ak~q@\3433\340@\0338\'A\3773}\3006\347\253\277\275E\233\300\330\366\342@\2727\004@jI\256\301}B\014A\312\217\305\277\031[\243@Q\222\245A\2177\324\300\327\326\213\277\023\366rA\256\211\272\300\204t\'\3017)\241\301_\375\024A\024\306`\300j\356\036@\254V\357+>\360\303eAx0\373@Y%/A\265\244\317@&\320H\301 
\327\273\301]a\014\301\375\030\251\277HT\247A0\030^\300\305\306\227A\271\003\256\301\342\335\346@\263[\324@H\332\241@\273`HAV\315\235\301_\272\355?&\002\205\301.\337\214A\244\227\207A\267\242X\301\007\214\000AmQ!A\234P\211\301\005\010\375AY\265T\301\342\324\340\3019i\370\301\243\210\355\300\322\010DB8bY\301l\001c?\245j\221A\246\300\307\300\324Y\211\301\t&\030\301r\363\231\300\'Z8\300\366\233\375\300H4FB\354\372\255\301V\303\233?\3004\251A\003\013\374\300\245B\377\300\203\377\310\277\307\264\016?\362:\311\277\265\212S\300\351\270\350@3\345D\301\330q{@\250g\210A\025\014\005\300~\335\242\250\236D:\236\242\177\307\202\000\000\000\000b_\365\276bu!>g\206\210\274\"\037\337@\371\365\275\300\264\2622\277\000\000\000\000\000\000\000\000\000\000\000\000p\254*\207\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\347#=\221\000\000\000\000\000\000\000\000\005@\261\251\350\003\300\277\330\016\017\277\001\377\376\276w\323\250\207\303j\223@p\211\002\251\371/\003\3003]\213@\247\214\307\277G\033/\300\364o\272\276$o\375\277\\\027L\277\0003a\300D\357\230A\231f\023\300\203k#\301\032Xc@{\266\224\300X\347\025An\227vAJY\212\300\226\006a\301\231\355\234\300V\205`AV\327\337\300R\322\355\300}\013\334\277\352G\n\301\277i2\301\276+jAt\236\352\300\022\374\254\300\355\345\210?\177u\\A\224,\217A\216\026R\301\226\033\017A0\350\204\301\030t\276A\204\200]\300\255\303\003\3019\236\315?\223]%\301\374\277\300@Dw\350\300\026\354\260@\207\306.\300\3660\272\301\345?\312A\241\331\227@P\355I\301\244V\232\301\206\022X\300\360\265FAq\324\206@\355%jA\200i\301\300ET\317\300N\271)\300\264\000\006@<\026\025\301\025\020jA\247\221\246?\005\000n@\235\211\364?\223\374V?\037\244E\300\307o\027>\226\236\335?P\007\023\301\177\306\200\300~\261gAr\215\276@\223\246\376\300\026h\250@\362ZT\3006\2541A\323\230\'\301\231cI\301\367\035\352\301\304YMA\270W\220A\020\023X\301H\334;A,\353\027A\204;\'@E\367\033\301\352)\222@Ju\313?\\\276\216\301\215\025\034\301<\3232A\270\361\036\300\213]\323?0ddA;?\322@\
326\226\305@\312\0223A\250J\367@\213\355~\301\212\236\245\300_9\026A\365\005\024\301]n[\301pi\307@K\000W@\245G\314\300\036;\374\300Q\255\214@\2401\250\301\245\202\032A\205:lA<\266\223\300\225\335!\301~\230\201A\0021\240@\006\032.A\242\220YAp15\276\3669\n@F\304T\301\320\255[>\300:q\300\315\372,A\211rN\300-\217\211\301\323{0AU\212\343@\030\023\306@\360\264\337@\232\303\331\301d8\005@\212\3050\3014\024OAv\300\016\301)_X?i@m\300\214p\301A\265w\027\301H\341D@\343aq@\263\022\350\300ved\301\200f\245A\303\206_\301\366\005B\300ucKA%0 @\321\216\213?\366$\366@\2405y\301\366{\356\300\361\264M\301\344\277\354AH\200\243\3012\332.@\333\224\214A\010\223\306@\277r\200A\tk%AS\t\230\300?\327\214\301\377\345)\301<\206\320\277\223\231\"\301I\000\255\300\321\270VA\021\361\346\300\216\314Z@(\237\371@\322J\n\301a\246`\301Y\251)\301\251\2522@2\345cA\322\275\225\277\\\311\001\301p\035\376\277w\r2\301\275\337\327@\213U\034A\250\365\226\301\250\261\222\301\310y\201A\315\260\302Alw\262?7\340\003A\330\246\234\301\314H\230\300U\225SA)&\004\301\305Yq\301\256\345~\300K\\\251A\270\267\021A\370`\235\275\207\202\003A\331O\224\300,g\237\300\321i\262\300pN\375?\001\376\247>5\252$\301\313\376 A\200\237\340@9\001\010\300\330%BA\2578X\301(\223\014\301\201\024HA\340\200\254@\211\004\316\300\341\204\233\301\315\033\230@\\d\016A\343\325\217@q\277VA/\245D\301\214\347\372\300\374\263\225@\325\223\241\301\n\362\245A\372\336]@\346\253\014\301\034#pA\336+\003\301\262<\326A\314\321\204\301>D\372\301\350r\274\301\013I\344\300\267\203\241Bb\264T\277o\036e\300{\312\304\300R4\223\301\t\276\200\301\353\275\300\300\301\202\025\300\005\024\332\277e\243\024\301\007\264zB\267t\265\301\033Y\204@./$\301\342\352\311?\222\352\036\301p\215$\3007k\214@\236\036\207\277\240?\003\301|[\241A\233V \301`\026A@ct\005A-=\204\300\354\323\330\232\343\236\030>d\361\022\213\237\221\232\256 
T\306\275\210\276\224\275\313\023J\277\274\241\341?\213\242#?\315\313\313\277\000\000\000\000\000\000\000\000\000\000\000\000\020\357J\206\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000*L\014\220\026\025\225?\000\000\000\000\225\215\260\251\272\240\034\277\004\333\332\277=\376\375\276O)\250\207\027\250c@<\r\002\251U\272\363\277\331\330\330\276\241\240 <\365\263\207\277NO\n?\250N\032\301y\375\374\276\'\177\247\207\350\332\353A\211B\240\277\\C\211\301\031\344^@\20586\300V\314q\300\255\255\277A\022\222c?\317\r\243\301\256(J\300\023\013ZA\375EV\277AM/\301\312X}\301\267\233n\300.\250E\301\371V\223A}\231\331\300\033_c\300x\347}@]3\204A\027p\365@\256\363\211\300s\032\031\301\036\215\351\300H\000\266@\367K\351\300\346\222T@?\343\006\300\000\370\235@\305/v\276\302N,A\217G\345?\366\254\376\276\205Y\324\301\"]3A\314\270\337\300\010\034tAh\277\032A\036\262\003\301\350UP?F\272\005\300\225\317\354@\273\322\355?\005\375\231\301YG\317\277\337^\302\301\216~\366?\025\240\232\276\037\325\032A\352;\342@\203\301\246AP\354\202@Nc\237@\336%\035\301\336\r\275\301\267+\027\301\323\260O@\325\217\327@\353\362\032\300\016\221[\301s\035\203A\t\215\335A2Bj\301\257m\203\301\247L`\301\r\356\207\301z\202\250A\3050\372\277dA#A\342q\332@\347\223\257A\301\225\177@\201*3\276\2621\013\301Qv\031\302\007\224\253\30190\306Ax\272L\300S\214)\300\022\203\215A\265%MA\365\002\232A\334d0\277c\365\356\276\025\013\371\301\277\235\341\300\221\000\214\277\266\024y@\"L\013\301(\211DA,\023yA\311&\213AX\243\324\301\255\000\022\301$]\r\302\'\371\264\300$\032\360A\3139\335@\246~mA -\"\300\203\217\375@\177V\234A\027J|\300^-\230A(\302\032\302\307\341cA\250\357&\302\211\200\272A\265\345\253@I\361\311@\000{\306@\214s\036A\021&E\301\257h\225A\321*\237\301\363$\354?\312\217\360\301 \350\t@W\036\373\300\303\352\212A1Q\330A\316\210C@Bz\301\301\233u\371A\236Qy\301\376\370 A:\214\264A\354\340_\301\302\021\273\301\374\321\211@\244\027\013@u?\333@\022|\270\301\361+\345@|\200\003\277Q\233 
\301\263\264kA\326\365\007\301_f\323@\335G_A{1\000\301CZ\365@b\271\r@\360g\351\300&u\213\301\355X\210A\304]|@\206\334%\300\245\026\212\301\303Y\014A\317yA\301\266\225\305A\177\226\246\3010(\267@A\033l\277\\\224`@$\271\307@\357\275u?\347/g\301\241\035UA\302\342\311\300r\000LA\342\021\242@\240\373\023\301\013+KA\344\367#\300\270\234|>m\033e\301\034kz@lOF?\340\006-@m\317:?\356Z\037@\345\321\361\300@\342\306\300\263\'=A\200\227,\301\2530\275\301>\354?\300\254\342/A1\215B\277,K\325A\357\242\274A\'O\032\301t\355\263@LJB\300\234X~\3013\202k\301Z#\000@%h\021@\2513\213\300(\267bA\331o\005A\233\177\014\3010z\227\300\300\362\033A\300\330\n\301\026\355\364\301\303CO\301M\323\241A\365\335oA>LGA\207A\225@_\260\313\300\0001\n=\202\241\004\301UV\236\301\247\353\210A\271\266dA\345\261&\301w\242\202A\331J\362\300\276O\203A\'\347^\301v\265\010\301\034/\222\301*\231\324\301\273\277NBH\024\306@\000Z\017\301\265\273|A\"\335\\\301\262\016Z\301\320T\340\300 \343\315?r\330P@OCO\301\3327\212B\250\310\312\301~\301\274\300\322\250\r\300\251\261\335\300w\262\367\300\217&\036\300\371\3266@v\331\242\276\321P\222\300\0238\226Ank\231\301\323\010\262\276\306vxA\373M\"\300\271n\005\233\241%\373>2$\t\300\002o\222\255\356r\031\273\342\340\340>\333\351\241?[\347\346>\243,\346\276\270\241P\275\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\247e6\254\227\226\2264\000\000\000\000\371x\352\247T\204\200\275\356\361\367\277\374\253(\2754T\337\205^\\\036@\247O.\247\256\344\335\276\201\200\000\274\036\307\201\2765\017\360\275e[\336\276\001E2\301\302\315L\276\013Xy\273,\004\301A\206h\177\277\246\210/\301\322n\310?\317\356\300\274\255,e\301&\017\261@\024%\253\300zn\375\300t*(\300\234q\321A|\242*A\3139[\301Z\342\265\300\211\350Y@\t\240M\301\3364\221\277\253\214\210\301\033\335\343?\207\016U@\007+\254A6g%\300\252\233\024A\327\031\275@\274\013!\300\365\342\276\301\351\225\025\301\213\2755\277\nR\204\300rM\232\300B\315TA\
366Z\205A\202:\030A\202j\212ANO\263\300\345h!\302\314\346l\301\306\346\005A\005\251\232A\352\"v\277\222\346\003\300\214\337\232\300\256\367\274ABj\315\277\301-8\301\214\333R\302\035Q\033\301\257\025:A\224ro@\226\310BAW\351\324A~\310 A\000C5A\212\227\r\300\346\334\002\301bN\310\301:c\276\301\232\026\251A/:<@\262>\017AC\241\320Ad\007\312\300$\334\315@K\323\t@^\2035\300\275a\222\301\200=!\300\360#j\277>_\004\301z\236?AF\314`\3006\327\330?b=\244A\\/X\300\300\2634\301\r\331\213\301\200VlAq]\202A]\017IA\275QM\300*L\t\301V\312\347\300\325\256\361@\210\361n@\261Nh\301\2126\007\302N\275*\301\222\305mA\372\365Y@\363\031`A8oEA8C\220=\000=)A;F\306@`\347\304\277\303\263\221\301\320t\212@\213C\'B\337\356F@\350\326\023\277\204j\003\302\005\274\035A\036\370A\301\215}\244@\240\333\010>k\260\311\300\361\2732\301i\224wA\311\222\025Al]\354@\347\275/\302\277\013\226A\316x\245@\314\304\216\301\003p\325A\272\360\200\301\360\010\257A\343\367\217\301\036K\001\301\016h\030\301\322\016\252@\376\360\373??\226[A\240\255\315\301f}\000A%\337q\301Bgq@\025\025\222?Q\277\324@Z1 \301j\024\241A\217\017\347\300\232\250\222A\002m<\301\035\305$A-\350\r\301\206\001\257@\272\n\003A\354\027q\301T\037\017\300\2208&A\216\177\275\2770\270\242@\321t\254@VN\254@/X\353\277\301z5\301\334%e\300\215n\264\301\341n\022\300\206\023\206AI\252\027A\032\352\222@\3715\004\301)\030\270\300\374\354K\301\331\031\006A\242\236y\300h\353\"\301\341+\021\300\342\355xA\201\231/?\344\345\223AFj\247@\330\302\020\301\200\311\022A\000f\265\3003?\340@\353\334\224\301\020\317\217\301\024\315\347\277\214\252\256Ax\324\035A|0l@\232\351\274\300!\206\267@\376\331\222@\033\313L@\247\230l\301\365V\006\301\230\247\373\300\245\340\"AZ\350\030A 
\365\314?\003\326\271\277\2159N\300\224\004\323\301\323\310\272A\241\251\030\302\320Aw@\273\362\021\300N\336\257A\243\234\245A*\374/A\223\032}\276\220+6\277\237\233\262\301n\272\023A\212Fo\302\371q\205A\"n$A~\3368AJ\340\302A\022\373\346@\370\205C@R-\207\301%\311}\301\321x\277\277\213\221{\302l\224\000B\201Qk\301\366M\rB\216p\010B\303\025\025B\253\016\000\300\374\316uA\006Z\365\300q9\017@\311\026\232\302\216\336~A\323 6\301)\207AB\350\263\250\301\250\271Y\300;\201Y?\362j\204A\213\365T@\306\213\352\277\200\035\270@,\221B\301\376\216!@\035\030b?R\356G\301I\215\360\300J\007\201\277T0\200A\000\000\000\000\231xh\300}\201\247A2\373\275\301\004\247\240@|\"\346\277\223\355\210\300}[\017\232\000\033\007?\314\350\356>\354)5\211\343L\227\277\204\356\033\277\206\275\322>\243k\027@N\300\344\277FOF\276\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000=<\2245\000\000\000\000\255|\325\211\"\335\230\233\247\244\262\277\213g\301\264\000\000\000\000rm\273?AU6\242\"\215\214\275\000\000\000\000\035T\242\275h\355\320\275\341\224\220\275\n8\000\301\026\2539\265Q\032\262\272\r\361\216Ai\2229\276\217\266\026\301\322\222\340?\266>\020@(\'\201\301\352\274n@\262(\371\300p\262\360>\006\235\034\300Y-\303A\245o\344@P\337U\3015\026\034A,1\001AH\2156\302\0239\222\301\355:\317\300d\346#A\243\3269@\267V\305@\320\223\023\301c\342)B\247\214|\300\213\367\326\277\001\312(\302\310\223\376\300\237U(@K\313\215A\035\367\345@\252\332}@\236X\003A\202~\200A^KmA\301\260\327\300\274\216T\302\334\300@\300\266\010\230A;\251#\301$\307\301A\331h\237A\241\267\026\300\332\206 
\300\225\247>A\234\024$\301\274\302\032\302\352\314\302\300A\252\276@\2469\200A\344q\003@\274\363\235@\311[\372\277\n\306\200A\021\306b@\017>b@\027h0\301\222\324>\300^\267SA\325c\316\300\311\356\370\277\342\264\031\300\246r\020\301\205TXA.+\273?\316\216\016A~\217/\301\231\271\000\301\303`\217A_M;@\322mQ\300\250(&\301tSH@6\204\326\277\327`uA\317N\233\301\213\235\206\301B\201T\301\000Q\262A\207\370\314?ZBNA\230\035\017?\363\005;\301\336\211\010A\034\217\351>\200\007\254\301tf\221@\010]h\300\254\353\232A\220\361\323?\2729\205\301\334\302E@P\370\002A{\267\217@\034\241\300\277c\363;A\036\326\231\300@\207\316\275t\221zA\333J\246A\350\001\271?\251p-\302\317;\024A\346\027\022\301*)\r\302\020\036MA\006\347(A\005\207\205A\237\000\255\301\340\360vA\032\031.A\365\214\"\302\237W\320A\336\031\244@8x\245\301\024\344\026@\'9\225\277\3627~Ak\354\035A~\251\277\3001\2332\301\224\277\270@\240\\\301\300\261x2A\2120\372\301\027\303 A\333\302\037\301\026\3500\301\374*\235A\377)Y\301\324\016C\277\230\3524=\n\"\263A\254UhA;\366\235\301\262\264\224@\315`k\301\001\344\226@+_\353@\367\016\230\301\242\311\275@\326W\246@7\342\330@\365\266\226A\313\255\336\300\263\272\267A\221\t\361?\347\323\267@b\214\223\277\311[\234\300\323+q\301\346\"\266\300zU\374\300\332\3350A\250\013\205?\'G\034\301;\336/\301[\244lA\235\0207A\350\376\177\276$\333\014\301\257\320c\3006\330F@`\240?@\027R\022\300*i!\301\'\351\021\301* 
\364@\222x=>\264\243\315@h)\305@\022.x\301^\334\261\277\013\362\217A\343%\227\301PKP\300\332\007\222\301\005\314\223\301\3409fA\276c\303\276\362\345\013\301\213O\231A\252+\241A\316\327dA9\374B\301\236+\217\300\231\037<\277\267\026\222\301~\362~A\207\004\327\300\273\246\217A;\202ZA\277\352}\301[,+AE\372\357@b\330\200\300\231\005\351?\200\355#\302N\242\204A\322_S\300\244\327\207@\200\376(\277\363\267\352@\333\3377A%C|AyN\314\277h\026\005\301/\025C\301\314dh\300\026y\347\301\320$\002Bd!\010\301\366\306\316A\313\201/\301\347\226\212A\330\016~\300\212c\034B\354\315s\300Y-\227\301\243\240M\302\372\255\271A$0\262@\201}\256A\022k\347\3011eIA\033\355\315\277\224\217\030B\037,\022@>\323\232@\335\361\256\301K\355\252\301V|\313@\207\356\205\277U\216\222\301\342 \334\277\177\371\272\277>\342(Bt\326X\275[\nG\300T\245D\300\026\321\267\301\321\237\212>\2274\206\300\323\362\274\300\213\356\243\233\3231\300\202#T\200A\200\320\322\2769f\200\277gHx?\305\215=\300\022\326\372\245\000\000\000\000\210\023\025A\375\205\261\251x\027\323\300.\274\030\301\375\'\203@\206\023L\301\233\340]@\277\275\226?\2732\226@p\354\213\277\002W1A\023\026\254>i=\273\277\235O\007\301\225\321\270@\203u\310\301EU\026\301e6\363A\344\351\"A#\375[\301B\245cAo(\243\301\207\274\203A\233@dA\222\221T\300g_\017\302\263-\270\301K\340\232A\347\326\265\2774\306\325@\361!\367@\250\321KAu\3035@\247\203\241@s\303\356\300\300I\265\300c\223\251\301\000K\357@l\356\251\300y\3570A\327S\276\300\363\356\235@\002\200\210A\320\352\325A\177\310C\301\317\\{A\226\021<\301H\250\203A@i_@\274C\262\300\273f\226?t\260\017\302\036\233\364?\350\206\200@\236\200y@\243\010\306\300\242\230+?E 
\302@\262r\321@\350b\206@\340\213E@\005\363\241\301\273A\003\300>\244\206A\353`\267\300\014v2\300\304/\n\277B\'\230@\233\013\216@e>\200A\036\233\007\301K\234\321\301\2662\327?L\366i\301\277M\230\301{]\224?\335jR\277\365?\031A\2351\037A\214-yA*\322\216\300D\337\212\276z7=@\030\351p?H\232\231\301\177\224^\277y\244\355\3008\315[Az\305\213\300\'\004\300A\303\325\207\301S\314\032AF\333\371>]\255\220A\336\200\013A\211\321\314\300\206\264\355@;\344\206A\020%<\300\364d<>\351%,\302\010*\022A\033\220\000\301\306i\232\300c=6\300\363iNA\232\242\263\277L\235\031\301\037P\243\300W\024r?\237a\251\300\374\351zA\304\317\350\276\005\'\364\301,\351QAL\353\233@\007\000\273@\037x\'A\204[\347\300\277\373O\301v\226Z\300\234.\004A\340\2279A]ww\301\333\207\210A\323\004\035\301\311\251\241@\252\251\305@\364\000\341\300\223\267S\300\344\312\016\300\326%\320@\022EC@\005\337\004\302:CEA`%O?h}\264\275O\360KA\2353\202\301\347m|AH\227\006\277U\236H\300\337I9A\222\364\301?\247g\360\300\354\"Y\301\001^\211@e\272\177A\016\023Y\3017\321\244\3010\216SA\177Z\016@\216\037\220A\360\230\013\276p\372\273\301&FMA^?\037@\254\206\031>\202\214\366\300\254!\356\277\332QbA\021\325g\300\341\252\346@Z\276\016\301lmS\301\000\000\000\000\000\000\000\000\000\000\000\000\224\224\024\276\000\000\000\000\000\000\000\000\362\365E\300\362\365E@\000\000\000\000\027e\315\246\000\000\000\000\000\000\000\000\324q\251\233\000\000\000\000lZe\275\353\345\333?\"\224\356\277\250[c@M\0041@\371\307\002\277rK\310\247\332\360\024?\225\224\224\275}\254\303\300{eC\3015Kh>\310\252\302?\262@\367?\304Y\002Ah\226\n\300\023\247_\300W\355\021Aw\366\037\277\356\305\034\300\237\002\370@=\324\256>\020\201\330@_\225\233@\236$aA\347\004\255=w\236\307\301H\r\301?\260\321i\301\\\326\205@\014\231KA\303\370\342\300s\347\272A\322 
\202?\334C\354@\016TJ\301~\324\023\301\270\250\350\277\331L\237\301C\013\307@\251s\205\301\340W\"\301\207\372mA\214\n\344@\377\317dA(\226\324?\034\231\256@\217\006\342\300\272\205o\301\3462\256@\021\\\237A\250I\226\277%\322\207\301k\225U\301\233\330\\A\000\361\220\300\357\t^AU\253\337\300\037\221\347\3006\374)@O\261\034A\177\022\257@\261[I\300\216\202]\301\035\302\000A\344=\361?\207\236\002Aw\350d\301\243\007\037\3018\r\374@SoVA\315\270\203\301\265?\233@\3543\213\301\241\202\201A^0cA\032\004\321?\2705\245\301\342@\235@\"\021,\277\220t~A\337\006\241\301\020\363\212\300\020b0\301~\3379A\253\301\r@+D\353@Y\\\333?\246\0169\301\334%\004A\264\206b\300~ka@\016g\324\276#\004\251\300Zp\301\355\327\207\300\206\371L,\242A\207\033\222?Y\330\347\300P\010\244\301\243L\271\300\220\234\237@\320\372]\301\257\372\220\301\020\304\020\301\030X\303? -\254Apv\333\300!r\274\277\236\030\317A\034_\225\301\302\342\230AeV\373@u`\233\301U\234\373\300\261,_\301\250\372\256\276]\330\240A\2125\240\300MW\240A\224\372J\3001\026\340?\371\035\325\300cB\272\301\325\355\227>\177\003\335@\240U\204A\3665\\\277\310\020\233\277\310\222\205\276\350\244Q@\226 
\250@\277\016\nA[\355\206\301\306\331\222\301\031z\004B\2743e\301\367\222S\300\225\373\033\300\001YwAh\252\341\300\205V\244@\':\233@\223\370\023\301d\177\200\300\030\220NA\274\017\r\301$1\256@\340\334\311@\326\231\375@(\3635\301\235\027\177\300T\226\"\277\035c\363\300\200@\320\301\252!\315@\314\310\307A\324i\206\301\214\215\264\300\333r\317Am\004\267\277\360\360T?{FhA\326O8\300\013\361O\301f\006\274Ai\337\277\300\357\202\312?\000\017\333?\205WZA\022\271\235\301\343\365U\301\310\260\310A\023\263\005\277\253|\326A\270dgA\325\266\035\302\026\017\256A\026\tmA\240*-\301\tl\305\301\371\326\333\301aO\241@\030\342o\277X\230\323A\246\t\362\300Gw\025\3017\3020\301e\276\236A\210d\270\301\252X\035A.\250\023\301\372\266\034\276\317\036\333\276\353\373\306A\341=_\277\027\346\324@\017\035\210\300\211\030u\3019\266\303\300N^%\277JLo\300\326\324\324\275\203\020Y\257\362\343\215@\3773\271\243\274\342\224\266&\342\r\300\275\223\333\277\231\374\234\204\276\250*\276\256\251l\276\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\231z)\226\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\024s\273\277\024s\273?\000\000\000\000,}B\246\000\000\000\0001\374\362\257\244w 
\233g\275\231\206\255\350\367\276~\310\272?N\303\r\277\026\334\373@\314\017;\276\213&#\300\325\0131\251\0030Y\277\222\214\014\277\3543\205\300\034\217\340\300\252\252]\274\216\037\325A\276\343\324@\342\212\263\300\210\376G\277U\342\266\300\037\217\002\301\376\370\376\275\220\334\273\300\375\027kA\226\243\317\275\260\305\232A\320\256\276A\013\272\270\301l\265\r\300\374\035\262\301B\027\014\301\262\2143\301\256_\034A**\233A\217\204\226\300\020C\271\277\274m\\A\303\027\"\301<\302\214\300&)!A\021\336\020\301\000A\211\301~_i@\025\217?A\265\231\030\301y\235YA\312#6\301\036\351K@\204T4?\255\311\203A\033\356\356\300\001\216\330\300\375\203,\301N\211\207\300\267^5\300\200\371?A\270\313:\301\332\2318A\210o\300\301\234{\216Ayh5A\3128\322?\364r8\301\\2\232A\242_+\276\024\3304\301c)\257\301P\233-A:\250\005\301\351\036\361@\037I\202\300W\305\027\300\344\006(A\273L\341@\251\331\013A<\263\'A\200c\016A\274\327\014\300#p\274\301Er|A6\000>\300[\266\355\300\310\226l\301Bf\366@l\236\026\301\227\315\277@-\362\225\300\034>X\300hm$?\304S\004A6#\216\301\367\013\010A\'Z\204@\320\255\217@\322g\337?\014)*A\253s\005\300`c\307\300\310\301`\300Z\033\007B\373\263*\302\317Y\204@\210\370\216\276\223\037\237\300\273\317O\301\304\034\352@\341q\304\300\317\344\"@\316,\271?]g\213\300)\217\032\300F\006\345Ar\t\021\3017\312\037\302\230\n\035A\036U~AH\205\232AVru\300\257I\010@\320\023\222\275@\031\371\277\347\321\227@\3640\303\300M\331\237\301\367\010\261A**\017A!B\035@\350\350\211A[D\262\301i\2057Ap\030\363\301L\365\234@\n\033\260@[6\006\301R\"\n\301\362\003\222\300\307\263A\300\241q\253AT+\267\277 ;=\276t\324\204@\"z\242?\206\252\037\277\242\034&\301\306\020\031A\346 
\202?H\017{\301\343K\'Ai\360b\301c\204\203@\226\321I\277\031\006\031A\320\240\312@\020e]@\001\001\033\302\237\240\311Au\263m\300\253C\025A\302\215=\300R,9\301\317\r\tA:4\010?^\221\036A4\177\231\277\022\313\253\301MI\215@5P\260AN\225l@\225\n\220\301\302\030e@x\250\217@\355u\236\300\335#\355@z\304\207\300+\377\206\301Z\256[?8\201lA\211}\302@[E\201@\365\3239A\031\316^A>\221\304\301\251x\263\300\374\332\376@\224\030\233\300\275\302\023\3019\344\251@\274XbA)\312\374@\\\344\r\301T\217\312@\373p\273\301\016a\227@\017\200\330@\321cK\300\367\020\326A\r8#\277\247\0169A77\331\300\242\235q@\216hVAg\n\010\302u7\215\301\370~\347@\211T\007\301=\3764A\352\212.@i\213\306\300\317\251\222AnR6?\346S&\301\376m\312\301\310\260 A\242^w?\235\336\363\300\323R\214A\025\277\265A}\363\001\301\027J\024@\243\303\025\301\025\333\204\300\216\365\262@z\t\237\301\3752\232@$T\034@\220\241\217A\315\373\335A\206\344\231\301{\020\001\300\033\347,A\016l\273\300ev\r\301\253\213\336\301\006hj\300,\033U\300\341\037dB$\014Q\301\034&\r\300\212!\023\301Ok\201A\224.\236\301\240\275\037\3015\343?\301\357\343,\300\216\n\010\276=U)B\322w\235\277\370B\333\300\345\253\004\301\035\303\030\301E\244\267\300\010\020\235\300\225\210;\300\224\224\024>\277\306\315\276\017^\022A\206\3106A\300\345\3238\301\246~)A\302\254\023\3013Y\376@\306c5\300&\025#\301yd0A\244?\"A\231\214(@\'\342 \301\232u 
\301\202\370QAp\231l\301\202\321\026A\272\037\352?#\025\"\301\025x\357A\335\302[>\254)]\301\374\225\217\300>\3124\301\376\034YAR\261\362\300\236\204\257@\255f\204@q/\254@@B\023\301\003bK\300\371M\247\301}\302\217@\207\007\372@[B\022\301\362\234!?Sf\315@\274\334%\301\026\347\026A&8\224\300E\317\022A\356-\025\301\024w\206\277\r\265\rA\300n\030A\331\333\005\301\317\003v@\304\345\363\301\254\346\030AD\345\345\300N\326\263A\3164\t\301\320\322_A\n\r\231\300\237\314=A\031\236\206@\353\023\300A\206\202\310\301\270a\337@\313\322\225\301\250\311\265A\177\201\316\301\204\377\024\301\001&\022A\363/R\300m\001\\\300\317Q9A\276\321S\301\223o\021@\211\377(\301\340\332\264A6M\327\300\273\235@A\252\3743\301\020t\214@M\023\371@P\203\005A\377\265\211\300\000\004\322\300K\336\304\301P\362\036\300*s\256\277%\276xA\r~P@\202\317\304\301\346\370kA\270\016\222A9\203\314\300\256\025\223\276!\373\217\301\376\0101@\376)I\300\0357MA\023\253t@\315\r\251\301\231^\231A\305\301&@4o\"\301\367\342qA\267\250V\301\321+\352@J\260\220@9,\030@\277*\315\300\357\340\214\301\022\213\303@\002\2070A\210\234\233\301\031\203!AX\242\327\300]C>\277\335P\326@\034\230\017\301e\250\234A\2677\000\300\033\375\005\301\354}\262@\007A`@\214;r@\240\275K\300*W\334\277t\030\301\300N8MAx\t\214\300\202leAC\271\373\301.\232\237\301\226=\207A\036P\030A\335\214zA\245\221\270@6\2308?\224\243z\301\252f\205@[\002\376\300\234[\177\300\344_\330\300%\354\200\300\362\313\255A\364\201|A\211/\205@\244\231ZAHH\311\301ky\356\300\306\260\006AG{\r\300\260t\357\275\231M\215A\033\260y\301\265\024\260@D]\247@\247\017\203\301\331\341 
A\354\331H\301\221\005KA\347U\234\300\345\310\367\300\3350\252\300\2614\227\277\310+\240@l\220%\277\252P\020A\240\006\361\274X\030\335\300\225\247\014\300\237\310{\300\350\206v\300\250\235\337\277\177\234\217@\261\327\221@\203V\237@\344\321\014\277k\353\251@\366\270\341\300T^\017A\231\201D@\300\037\237@\343q\333@\2248\"\301\003y\001\301h\017\336@%\345\301@\025\2678\301f\262\342\300\212/\232\300\370\3513@\2766\226\300v\255tA\317\251\r\301Z\345)Aw \237?$2\036\300\017%\216A\024)\327\301PS\246?P4\331@ Y\245@\016aJ\301:~\235\301\242\373\030\300\306\350\231A\227\202mA\302\214\217A2q\364\301&\323M@>+\233\300\221\317\206B\002\234\264\301\302\307\237\301\224\221X\301So\253Ar\313\320?\316\245\310\301\005^\365\300\r\023`\277\252\315\224?_\223\204A\375\201\201\277h\357\234\300\267\336\027\300TO\211\300\354\037_?j\247k\300\224\000\274\277(\212Y@:\367\177\277$\340b@0\210u\260\336\335]\300.%\240\276|5\226\277\2138y\277\357\216\220\274\356G\005\254\000\000\000\000\000\000\000\000V\207v\235\365;\030\224x\232\245\277\000\000\000\000\000\000\000\000\242\002{?\344\321\314\275\026\231\323>\000\000\000\000\000\000\000\000\205L\306\271)\035\022=\000\000\000\000\351\362o\272\000\000\000\000\000\000\000\000\004\301\220\227\306\320\014\275F\024\330\277\000\000\000\000\346M\\?\226v\232?\251=\322?\t\257\335\277\035\207\244\250du%@\010\361\177\276@\002(\300\340\332\346\276\233)\303\276@\025\225@\315\217\326A\024\031\003\3013\262\035@\327t\254>d\323|\275\313\2732\301\351X`\301f\331\260@\203\233/\300\207\207NAZ\032\234?\213\355\370@\247\201\312A\231\327;\3013\300\357\277,\016\220\301<\017\223\301\263c\217\300\267~6\301\317R\004B\\\270\027An\2725@s\336\342@\223\352d\301\033\265\373?\250\336\265\300\344\246\224\301\361s\357>o\274\001\302\2729\367@I\336pA\037\3519\300\267_\232A\220\311\201?q\354\232\301{\352\271@\366\277\252@\254\347\247\300\246\365\227\301\031\357tAj\021\335\300\357\225\300@\373/\032A,\200YAJPG\301\3008\363>>\\\266\277\304\025RAJY\002\300\350\363x@\360tU@\"\361F@\0378x\277\276\306\
220?g\367\240\300\005Y\354\300\325\302\022\301\254\307\201A\265\214\256?\272\373L>\242\265\220\3018\267\032A\0231S\301U\245GAzK\376\300\357\240\266@%\377\314\300_Z\035A\224t\020\301\222S1A\032Q|\301\334\244f\301!~\036\301\016L\213?\n\017b\3009\352\204A&\201`A \320r@<\214\275=\206\214\275@\t\212\232\301\005\363{\300\271\275\032\3008\227#\301\345D\n@*|TA\344T*A\317\3742\3012\213\237\277\266\212\314A\305\256\276\301\251u\001A\251\023\350A\211B\214\301\235SnA\263K\300\301\033\232\303\301\"\220\027\301(\230\243@\272)l\300vTZA\363?\265\300\037&\277A\376\276\324@\252\232f\300\003\330\032\300\236c\021\301\270b\342@\000\230\241@\206\313\340\300\006L\033A/z3@iA\240\300M\034U\301N\205y\300&@[AM\324!\300J\027j\301\034N\271\300qa$A\000\336\226>\231z\237?\013\002\213?\204\301?\300\220e|@U\242\021A\"B\030@\231-\337\301zC\334?!\354\243A\302^dA\234i\007@j\025{@\353\332\266?\343\010\201\301\202z\023\300\305\373\356\277\000\003\263@\236\231+A\346\206\241@\341\321M\301\350J\007Ay\217\035A\227(3\301%9/\301t\2272\300\234\256z\2777M^A\337\214\201\300&m\376\277\240=\350\300\035|@\3012D\032A\306S\276?Ag\023\300\'_e@\330\323\373\300\273u\201\277)i\374@\344>\005AH\345\301\277\214\3416A\032\356\265?+\324\216\300\3162\213\300\316\271\035\301\000;\242\301\345\330:@\336\203K@\t\2519@\266H\305@\336\273Q\300b\312\032\300$6\320\276(z\006A\262\3453@\"\367\037\301\331\027\206>\005\337\374@\003B7Ai\210\036A\277\263\223\277\221\262,\300\312t\254@\377\320\207\301\364\321\202\300\270\326\200Aa,\301?\223\334b?\014\321\211Ai?\247\301\274\025\035A)b\365\300\253\335\243\300f\347]\300\003 
\006\301F\304\261@]\276\316@\357\r\355Az\242\335?8@\017\301\330\304\221A\242x\307@\265\312\375\301\030,\347?\267\263\347\301\257\022\263?8\354XA\357\213\036Bq\310\244\301\330\347{\301-M\027AH%8A=\203q\301Q\356\365>Y\322\304\301>\307\255?\255c\020A)\026\033B\343mS\301\343!\214\301!a\000\301\3150\204A7\345.?i\345\226\301\270\347\013\301\324\203a\277\221\343\202@]\2127\277\373\371\230\277\013\202E\277\376C#\277\242F\253\277\035&\206@\212s \300v\247p\276\217\003\027\277\000\000\000\000\315\302\036@d\244\243\257\000\000\000\000}H\014\277\225\337\253\277\000\000\000\000\000\000\000\000\254\304)\254\000\000\000\000\000\000\000\000\226\024\232\236\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\356\204\257\227\000\000\000\000u\203\202?\000\000\000\000\250\334;\265\322\320\320\275\375\351\024\300\304\207\235\232\273S\310\242\234\200K@\235\356^\231\242\243\342\277\337\311\305\276(-\347<\365\362\252?\177\370.\300\251\333\t@\267\006\032\277\254\203\304\242\224@\003@\020\274\036\277\\\003\235\277s\263\362>j\361F\277\374\344\217\300\344=\350A\265\3657A(bI=\002\263\364\277\026p\315\300\217\237r\301\274\200D\301\310P\364\277\272\"\353\300I\301\346@5R\226@_\262\210\277SI\276Ac8\017\277&\377\242@\273\252v\301\347\313g\301\236\276\014\301Es\353\301fn\013\3006A\353Aq\233\007A\302\034\261?\367\312$\276k\035.@\344\316*A\251\203A\301(<\366@dE\343\300\331\260\274\300\376\374\216\300(\300\240?qiSA?^\027@g\371\302\300\353\301\\\300\323\276\036@r\363\266@zV\221A\302\331\303@F\251\006\301\010FP@3\025\376\300+\274$A\005\031\256\301\261\345g\301i\352\020A\200\320v@\314K\007\277W\251\340@\345?\331\300m/\232\300\270\306wA*E\241\300W\270\002\301\'\317\242>T\365\252\277\320S\334\300\224\364vAp\235XAn\345\346\277gA\216\301\346j\272\277\363\3400A\200\277\330\300\034\232OA\002k\222\301\004\255#A5\221I\300\203\223\254A\273\241\335@8\022\307\301\266#\272@\202#1A\212\307\027\302\314\227\203Af\305\302\300w\344\205A\261\260\024\301\357\326:\300\360\205l\301\032\\FA\344\342.\3
00r\025\341A\306\345\264\300!\253\201\300\016\261\216\301\302aDA\200\354\177\277J\236\320?\272\356\337\300\322\034\363\301\020\017\370\277\211\215\213A\316J\324@cNA@\262{-\2772\376\334\300\200 NA9e\247Ad,\314?\251av\301\354\367=@1\002\247@ek\257\300\240\332*\301b\374\243\300\213\222f\301\240\r^\301\n\377P@\310\265/@\3354|A\327Z\000\301F\252\230A\000\276\213@\231\356\022\300\324\275\323\300E\3463\301\030\376\315@\3478YA\344\211\316?\212\373\261\300\311\223\026@\256\231\035A\230\362z\300\335N\372\300\332\376\250\300\256_P\301\220(\220A\0222\342@^\310\223>\002\326\343\300\325\333J\300K\000\344@r\037:\301~jq@\361\020\252\277\376\275\370@\002q\002\301ah\031A\255viA\017\251\022\301\272P\231\300-\224\035A\223\270\\\301\324\363\256@\246\3205\301\222\332\236\277x&\303\300\030`B@\250\000\023A\351\035\203\277S\335\013A\232\0237@B\215[\301\360O\272\275\0064\323\277?\352w\301\2373\001\300N\000\314\300X\260\333@\311\335\000\300\364@\355\300\177\r_A\322l\217@\336\300\203>\366\252\370@\337\320\303@\340\016z\300\023\003\311?\246Y\227\300V\356]\301\276WpAE%u\300\226\200H\300\343\303\214\277\266\200\373@\024[\257@\030\205\237\300\245\033\330@\215h\340@\311\324\n@\354\203\031A\'\346\023A\'[5\301)\272_\301.7\037\301\004\312\236A\300\222\313@d\234jA\232\330\365\277\2002\005\302pU\306?I\332\225Ax\374\247\301\335\312fA\027\343\232\301\262\346\030@\021\020\220@D\002ZArX\031A\361U\275\301M{\233A\241\303,A\033\321\n\302C\\\373@\234\024\035\301\003\354\032A\272\277\024A\257E\002A\367]h\300ye\201\301\007L\205A\344\036\217@w+\331\300\372\313.\301\347\324+\301Zi%\300Z\306^A\252\277\030B\240<\275\300_\001-\301\315\271[?Ph\203\300LiK\300\272\206\233\301\335\324\335\300\203\247\302\276q\352\200@\230\n:A\213\237\356\276\342\340\340<\2467\244\3004\332\221\300\357I\244\230\002\354N\300\331\020\365\277\022O\302\241\000\000\000\000\266\033\236?\000\000\000\000\000\000\000\000X\205\200\276\301\364{\277\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\226e$@1`\340\250\216\276\203\275\323\263\355\275\000\000\000\000\367N\277\231p\310\314\256\221\220\220<\235\234\234\275>\026\025\300F\203p\277\305\365\332;\313\202\304@\2216\177\300\"\343\023@\246\350\250=\365\3653\257\322\316\371\276\"S\231\277e\305\366\277\002\206\277\277Z\t\037\277\377y\341@\257\300\240A\341Y\330@<\366\206@\377\2702\300m\357\013\301\332\245N\301\216\3018\301\305!\347@\316\242,\277+\227YA\204_\246A\013_\235?\200\024\215?U%\276\300\021\357\227\301\312\232L\301\247\277\261\300\246\326xA\232\243\006\3028\273OA\016]DA`\340\216\301\341\021\225A3\271\241\300\013\036f\301=\345\007\300U0ZA\245\343I@\252g\341\300\223\277pA!\351\342A\366\020\224\300*G-A\322S\027\301\202\354~\301eO\264\300\215-k\301\204\336\352@\037\013z@\033\337\357AN\001#A\346F\261\301\302\255#\301\264;\231A\263\316\266\301P\370\220A.\320\005\3022\234}A<\357MAl\244\245@R\250\347@2\244\363\301\272\225\373@*U\370\300\36109\300fU\235?0\350\021\301\233\000\317@\241\024\022A$\367CA\243\263\202A\372\325\264\300\001\'VA\332\220;A@s\254\301\332\002\226\301\321\251\272\301xz,A\rK\352@\216\357\rAZ;5\300\212s\265\301\374L^@\365\021\321A\373\311s\301\301\001\336@\315#\266\301\240\201\007A\352\221\337@\311P\311A\210\207\024\276\355\227\330\277\021W\023@\201\261UA\250\217e\301\321<\034\301\270\240\362\3017V\310A;N\036A.\257\322@\031\325\337\301\315\207\370\300\326\220p\277\302\324\313A5\227\226\3000\327\211@\013R\357\301\237\026\223>\216\267\252\300\340\336\326@p\327\367?o\356\214\301~?\375@ 
\210q\300~\307C@\016\235\235@\007\275\361?\274m\004\300\253i\221\277\331\'\027\301OK\203@\310<\267\300\207\205*\277\205\333\334A/\023\231@Z\234s\301\371J\016\300\236\361\034\277\177_9\301r\376\301@\324\263[>\005\250%A\316\032`@\317\200LA\016\036>\301zZ\301@I\376m\301\257]G\301(\270,AtR\244\277\036\037\320@\021|{A$\370\331@\366\326\267?K\222R\301\207\205q\301\270V\301E\366eA`1\264\277ZL\252\300\241l4\301\235\315\203@\332\231a\274\262\354{@\352\013\305?\226\375I=]>\337\201$QO\277\017\016\335\276q\260T\230=\025C\300|[\234\277\000\000\000\000\000\000\000\000`x\355\243\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\362\360p4\007\373\252\233\207d\256\220\000\000\000\000\000\000\000\000\000\000\000\000\202\'-\206.Bp\264\000\000\000\000\000\000\000\000\225\226\2263\t-\210\2742\226!>\000\000\000\000h\323M\231\372K\\\256\355\250\374\241\226\220\020\276\227\334\321\211\025\023\202\300\374\225\020>\272%(A=\371h?\037:\201=\373\376\361\277I3W\260\034\375c\300\276&\235\277\014\311`\277\021\317\024?\306\246\222?\374\0334A:\000wA\266>\210\300TEQ@\346\327D\300\341\3251\301E\n\377\300 
\276\250\300\251\347\261?\254\034\364@J\312\'@\207Q\036Au\037\257\300\256l\222A\362\245?\301\032\267\243\301z7d@4\245\257\300\356\304\306@\323s\302AzL\030?\217EJ\301T\336\373>VN\004Ab\016\216\301kir\301\016A\340?r\035{@\014\252\301@\370ssA0\264-@v>KA\240t\252\301B\265\256@~\202\236\301ZVu\301\'L\215An\213V\300\371\340c@\023\333\013A\242r5A+\351\215@\r:\220\301x\255\256AM_\213?\376\273\313\301\3140\036A\000#\213\301\335;,\301\013\266\"A\006\262\355\300\016|\325@\030\n\202A\033\250\242@\022\336\231@\265\310n\300\301\026\033\301c\3555\301V\335\362>\253@\357\277\000\'lAz\353\231\301\177\200y?\301|\327@=\326\000A\002}\240\301H\377cA*a\202\300\217.tA\003\027eA\243\342\014A\220G\356\300\227\362\353\300\236>c\300\212v.?SR\342\277$\315\334\300Nr@\301\2607JAD\203\327\300a`\001A\337\301\271\300\200\334l?C#\342@+g0A\034\3439\3017\2625@\224b\223\301jFrA#\001\304@\226 9\300\370\002\236\300\362s\263\277\300f7\274\343\004\263A\036R<\300\r\031\236\301y$;\301\027N\300?L\311\302@\036\333\017A\224*L\300\261\177\360\300\230\316+AF\251=A+\'$\301\304\337\214\276\211t\217\3012\013\223\300\345\275\"A=\033m@\361\3324\277p\301\023\301.\n7A\201\361\304AB\263\240\301\244\270\250?\330\006\205\301\214`5\300\216\315EA2\345\010\300\346\rd@\350\377\205@\270\216\354>\234\352\000A\250\346|\300:6\025\301\266\036&\301\3323\364@E\t!\301\265w\343@`\346\010A\323S)\301\037\254\007\301\235\":AU\250\"A\360H\367\300A\331\003\301\237/6\301\361x3\300\357\006\235A9\360\367\300\237V\005@\376\016\025A/E\215A\336;:\301\032J\025@\335\206\213\301\023X\024\301\250e\250A{\341}A=No@\377\003\020\301z\257O@\371\256tA!\322\204\301\014\212\272\300\242\352\223\301D 
\205\301\275\263\203?\262L\270A)\314\247A\342:\205@\324\002\024\300@\251\361@j\026\310\301h\215!=6MM\3017\024\010\301^\313\237A\256o\331\276js\323\300_\276:\300\303\273\\A\260v\021A\341g\273\301P\330\nA]\210\032\301\203\306\031\301\177\211\237As\2624AD9\014\301\002W\254\300\004\315\252A\232\250\226\300\253#\260\301W\310\364\300\264\226\261@\333\374\\@qaU\301\253\227?A\265h\203\300q\265\337\276\201\313\245AH\353DA\032\221\250\301\313\332\023Au\377\225\301\242\271\274@bz\376\300\372~\300A\023\352=\301I\035\360@(\274\247@S\242G@\036B\227\301 \241\255\277\013\241\267\300;<\251\300\232\270\355@\326\374XA\243vC\301\017A\321\300\346\215HA(\267\250?\345\327\240\277\033\343\267\301i\030WA\221\363\010\300].\236?I:BA!\304\202\300\225\2071\301\242\261\217A\265\033\255\300\026\000\207\277\226\311\334\300\313\241&\277\302\266X@\367\037\351>\202\360\341>U$\021\277\300\332B\276\271\\??\230*\303\273\352\316\361\276tT\204\277\222\2630\300\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000jii4/\243\245\233c\361\250\220\000\000\000\000\000\000\000\000\000\000\000\000F\276\'\206\035\300h\264\000\000\000\000\000\000\000\000\227\226\2263\310\271\325\232\251\375\331\217\000\000\000\000\000\000\000\000\000\000\000\000bqX\205])\226\263\000\000\000\000a\356\021\277\361\256\016>\246A\303@\024\211\215@\000\000\000\000\324\205\224\277If_\255\221A\317\300\357\317D\277\213\025\330\277\363\001Q\300\264\2506\277\007):A+\\\230A\340\225\302\300\364\307\002@\214n\017\300O1\215\3011\256\021\300\243r\376\276\325\214\213\301$\260\306@\370\0271A\234]NA\2327\017\301$lUA\231t\311\300\210e\240\301\224\204(A\336<\247\277\252\314\207@\373J`B\003\354\274@Xs\223\275\247\311q\301\315}\261\301\321>-\301\213\310a\300m\213\324@N\030\251\301 
h\005?\005B\311A{W\212A\347\356\205A7\2128\301\325\202(\301\001\213\270\301G\357\270\300\323\332\032?\270z\026\301\367\350\322\277\377\2558@\316\204IAA\371\253@\3322i\301\3022*\276\361\271\016\301\211\034\022A2\344z@\340\217\t\301T;SAo\316\007A\237\2015Ah\0035\301\345\276n\301\033!\237?\223k\217\300\236\246r@\342a7\301\302\201\203@\014\274\033Ax\021\302@\207\033BAex\034AO\230\265\301Z\037 ?@:\245@\370,\310\300\366\241\026\301\030\035\244\300\212~}@X\311$\277\220z\210@\335\013\203\300<\3766\301\020]\003\301\246\3103A \332\345\277\262h\221@\342<\013@z\272.@\277\033.\301`\207\273@\016a\022A\014C*\301\343\245\204@D\222)\300\262\255\020\301`\206NA\226\334\311\277\310\007\272@\242\321\331\301\356\362\336>\271\320\267\300o\235`\301\261#\016A\336\310\021A\017?(A\tb\204@\022\321\001A[\340\000B\353\254c\301\330zO\276Bc\224\300\377[\025\300b\375\225@\265NvA}{\213\301\206\213F\276\223IT\301\234\247!\300:\361_\3009\233I\300\004\333\217\300\300\224\311\300\2359\035A\342j\262\300\357l\374@\376\334\016@t&\263@\2173\312@\351\2421?\002\003\000\300\036\320\272@\233\304\201\301\203\254\320@\307@\232Av\337\036\301\374\305\337@\243\366\213\301\331U\335@(\353\243\277\032g\203\300\\\346\370\277\305Yr\300w\330\322@\001\353\245@e\362\210\300\310[)@.\014\276\300FI\364@%t\212@\'\032\307\300\235r!A\313!\016?(\t\377\3008\n\235@\344\313\353\300\331\300\240@\010\241/\301zf\352\300\004]\211AG\242\260@\325K3@\361\036\307\300\266c;A4\300\357\300\334\n\031\301\272/\261?\331s\377\300l\227\023@\261\256\023A\027\203\277@x\245\014A\252\224\'A\004\215\017Abd\033\300x\265\\\300Y>V\301!\354\323\301S\250\320\300)#\314A\343R\033A\213\372\027A\241Qd\301\313\354\024@\201 
aA\305\024\253\301\260\3375\301\203\201\362\300\315\337\315\277\252\311\255@tP\216\300\357\004\304\277\2675\251A\'\005\021A\362)+A\332\322\000\302[\330\274@J;G\301\254\027,A77[\301\177\326VAw\205\"A\006A\361@`;\222@I{\026\301`\355\254\301\326\370\237>\353\222\002\300\365\263\000A\005\204\302\300\2126\233@C\224D\277\250\311\017\300b\022\211A\352\232Z\301\2550\300\301\2627\212\276\356+\210A\241D\276\300L\252`?\374dkA\2361\030\301\201s&\301D\340PA\'*\237\300J\301V\300!\3725\301\213[\207A\330\267\037\300\242\375\t\276\260X\261@mM!\300\034\267\367\300\251\237\003AQ`\316\277#\243<\277K\321\362\277\344\244W@\265\264\264>\314q\371>\353\335]\300<\242\030\275z\273\266>\332\331Y@\310\306F>\300j2\276\330\274\000\277\2770\035\277\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000Z\366v\224\250\013{>\226\264\351@\223\340i\2270\033)\276\000\000\000\000\344\232\244\300\360\023\250\273b\376\016\300\337&\366\277\266\272\343\300g1\230\300_\010\316A\"\373\340\276\317|\250@\000\000\000\000\313hv\301\355\334,\3004\235\250?\201\270n\301\275\323B\301\331U\320?N\261\336A\377\205\232\300{\353\200A\357\036\007\277t\350H\300\207\203\360?k\331=\301\022\001\024\301\212`\252A3D\363\277\224C\236A\021V\014\301\324\3263A\033<5\300z\007^\277\237si\3012\274a\301B~\313\276\321\246\362@\262=\346?\221\365bA\276\214\312\277\353\017\035A\037\002\342\300NA\"@\260\230\\\301\355iQ\301rt\002AO\337\264?\262R\211@%\230tA2\323\213\277E4T\301:\263O\301\252m\206A.\335\374\300d\\+\301\261\371\341@_\r\025>:\262z?\277`?\277\312\014\031@g\313\334@\354.\242\300^I\355\301\rU\207A\010\
212\212?L\0179A\242B\030A:\205\203A\327\227\004A\025\264\200\300\252_\330\300\326\004\027\301\211\275\006\301\036a\307?\004\312\225\301\270\220\\A\260V\177\300\031\004u\300\242JN\300v\310\214@\017\325\221AA\264\277\301\0039\227\300\370\224\244\277\342C\223@\231j\004A\'\375\235\300\256\324\002A\377\006rA\026\304\236\277f\335K?M\3745\301\276\t\205\301\324j\004A\200\020\317\300\337\341\234@E\242\224A\250\343\002A8,\277\3011\t\320\300\226t\240?\201S\213@/;d\301\037e\371@\352n\315\276b\315\013A\n\341\344@\026\031+\301\274,SA8\345]\300\330\234\301\300{\337\360\300\210VA\3013\\\264A\307\373<\301\000A\360@F\355E\301\204\243\003\300c\322\025\301v\230\275?\224d,AP\225\035A\2267R\301\0029\326@\030j\037?zr\010A\367\035IA(\365%\301\264\255\341?/\022\275\300t(8\301\234a\373>\262\017\240@\033\206\342@s\212\364\300#E\227\300\333\244\033\300\241\343\350?M`j\301\210\017\021A\030\323\204A/z`\300xX,\301v\357/A\207y\031\300\347\373\356@\027#\204A~T0\301\037[M@\333p.\300\366\301\243\277\327\245p?\314L\216\300\033ie\300Ww\242\300n\020\255\301%\265S\301\227b)A\201\035\205A\n\3517A\203\331\204\301\320 #A/\357,\301\321EnA\370\246\303\277r\275j\301:\300\236@*v\220A\337j\340\300\363/\241@\310\006\t\301\331\351AA\266\321\310\301\344\225\332@\036\204\005AH 
\213\3018\235<\300A\346,A6\3372A\303\316\251@$\'EA/\022\032A\270\344\002\302W\023\366@6\325w\300ex\024\3017\301\301\3003#\213A\314\016\020\276\323\267:A(9\265@p\003\301=\217\270\003\302\322\201\372@\231;\270@p\320\272\300\3640\257\300\311\320\317A\306\354\225\301w\200M\301O\363\244@G\316\332\277\201`~\301\326\333\323@\346\005\266A\333}\340\300r\261\007\301)o\256A\2543\302\300\335\364+\301\220>wAK\245\222\300x\304\323\301G\337\217@\021\347\254A\276\002\036\301\231\005\003\003@\000\000\000\000C\217\000\276\036\226U\300p\274\302\277\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\210\364\263\275i\3652@b\021)\226F\312L\273\000\000\000\000\2511\034\300\007\245\252\262?\207\207\276\201\205,\265\314\212\217\300o\355\032\301\231\373|AU\005\266\222\230Q\300@\000\000\000\000*\242\204\277\021\322\252\300\'\242\243\277X\317\\?\307\253\310\300\332\240H\301K\n\274A\227\223\267\277z\004\005?\246\"\220\242[m\316@0\252\201\3018\252\243@50w\300\003\322/@\241\276\200\301^4\033A\n\200\002\276`\230\004\300R\273\022\277\242\216\274Al\307\303\301\375\3552A\257k\376\300\306\215\013\300$\343m\301\260\207\233A\'*P@$\364m\277\313(\226\277\004\014\237A1-\223\301\231$;@i>\034\301l\217U\300\307\202\376\300R\201nAw\225,@-h\016@\224\212\237\300\241.-A\333\2204\3010\225\325@k\256\t>\230Y\375\300\236M)?z\203i\277\305\352\002?\326\263\250@\262i\000\301n\211\207@\230I\006A+\217\026\300\325n\014A\\G\223\300w\304:@\367\034\336?y\334\017A<\233VA\367\342?\301\2604\267\300\204Ag@H\005\211\301\224v\260?\355\254\220\300(\362^\277D)fA\r>\270\276\317\262\
233@\314\206\214\301TP\205@\355\235\332\277t\274k>_Bz\301g{\322\277H\243\265\300\301h\216A\366\323]A\351\226\303@d2\347\301\204\r\031\300\021r\223ATI\372\277}c\236A>\346/\301\237\213QA\311\336\234@\236N\210\300\304\214\233@+\240\203\301\036\245\031\301\316k\037Ab\0015\301(\335HAZ\324\367\300\326\257\035\301\344_\350@\347\241\352\300L_\210A\0024\213\301\222\263rAd$\256@W>r\301@\360\016@1\265\326\300\262H\260A\344\300\035A\237\243\253\300a\220\241@F\252b\301X)$\300\303S\352&\277> \264\301\360\031)\301\n\375gA\020\313\231\277\234\325e\277M\217\362@^T\240\300\316o\313Af4\325\300dA\005?^+\220@\3432k?\372\252\'A\275-\014\301\023~4AD\361\213@\345J\313\300\223)\217\301\253yn?\257\360\r\301\272h\200@\325KqA\033\376Q\301\023g\200\300\250\305GA\210\202.>\322\362\276\3015\014rA\236\357?@;\271\265\301\362\217!AhU\014\300C\242\031\301\235f\022A\230\021\003?\242\350\316\300\374\254V\301!\345\001B\362;\n@\031\326\304\301<_\306=\345\no@I^\334\276\254\254\020@\261q\257@^Z\375\277rQ\245\300S!\tA:\216@A\363\311\220\301$\334\017\300wg\257A\256\327<@C\352*@\305\344\310>I\333!\300\234\235I\301\371\273\336\276\246\035\377@\212\t#\301ZN\363\300cd\230?\266\341\001\300q=\rA\265h\202A)\310\316\2777CQ\301I\001\352\277=j A\225\226\250\300\364\327\202\300\037\020BA \336\301\277\226\2266@]\343=A\345\364c\277\320\010\267\301\235\333\312\300\236\030bA\347\277\000\277\272qL\300\260h\366A\373\0332\300\037\000\257\300\264\313\001A 
&\306\276\372\313\025\3019\037\005\301\"Q\016\301\345\344d\276\344K\333?\230k\203A&\'\310\275)9\303?I\334\244\300\316\256\000\274\341)t\300Xu^\300\235\233\334\300\000\000\000\000\307\306\306>\021\021Q@\000\000\000\000\256\236M\220\213\001}\276\000\000\000\000\000\000\000\000\276\2667\277#,,\300\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\013\375\237\204\000\000\000\000\000\000\000\000\376\215\205>\000\000\000\000\000\000\000\000\000\000\000\000`\352\361\276\3327\177\275^C\214>\000\000\000\000\315\337\212\277\334\035\236\277\0208\255\300\000\000\000\000\2253\277@\000\000\000\000\016\314\024@ \305\305\255\027\021\021\277\201\205,\265*P[\300-|g\300\311\n\261\300\265h\334?\313\241\017A\261\304N\223\330\362\271@\001\205\350\300\276JU@\302\341\235\2771\246\007\301\353\350\307\300\350\247\246@\333\3059\3002q\235?\303\004\303\224\207\326\215\277\312\351Y\301f5\331A\024\243\032\301\240C\020\301B\266c\277\377\375\214A\t\226\025\301S\212 
@[\265\316\224l\032*\300C^8\300\2537eA\246@.\301\rd\036\301.\302\334\300\265\365\354A\003\031-\301\357I\266@\237\031\242\250\300\211\007\277m\343\361\300\322x4A\363\025Y\301\202\250\244\300\276p\317\301\303\223>B\367\244\340\301\356%\020@\"m;\277<\013\217A\226\277|\301\314\316\253A\203\177\206\301\206.\233\300G\302\r\302\036\357%B\361\324C\301\265\\OA<\214\202\277\366\273\246A\255J\230\301\251\311aAJ\345\315\301\020\026\004\302\334\272\234\301\266\257\347A\245\002\tA\n\255:AKe#\277\332\233\036A\352\303,A\337_\021AF\005\300\301\362U\273\301\210\255\250\301\2054\206A\201\217\010\301\t\367\225A\304`\277\275m5\306A\020r\330??OrA\264Q\020\302\247\\\210\300\242\377\327\300\031\\\006B\3435\245\301e\003vA0\3170@\362\347vA\335\264 \301\n\006*Aq\342\025\302\354\232\362\300r\300n\300\366W\305A\315`\263?e\350\215A\270\310\257\300\013\252]@\030&\256\3000(GAx\360\026\302X\024%\301\377\253\326@\311;RAn-\314\300\024\267\332\277>\263\254\300\260\330#AOn\037A}\342\253A\350\035\014\302\003\263c\301\177\371oA\345\273S@%-\221\300\214\325\257AR4\222\300T%\205@\306b]A\244=\251=5\275\232\301Q\261\270\301M\002\034A\317\227CA\016zK\301>7?A\210\266]\300\275g\270@\303\021EA\363[\323@\326\230\356\3010\367P\301\347\376\354?\003\023SA\302% 
\301\262\242\213As\013\177\277\215k;\300\331\354\230A{\220\246@y\'\350\301\372\"\001\301l\324\206\301\360\235(AS\242\306\300\022\003\211AD\022\345\277y\237VA\263\300#A\375\t*A\221\221l\301\365\243\354\300\230\303\261\301Ov\242A1\301\240\301:\364\235A\342!\376\277\221\3532@\027\006\346@>\217\203A\342\232\201\301e\035\362\277#\246\206\301\035\212k@\031\327_\301\354d\212A\0300\237\277\312\253\326\300\207q\024Af\335\324A\303V3\301\344\300]\274\277\201W\301\263\304a\301M\375\207\301h\327\271A\332oS\276\030\355n\277\032\242*@2Z\370A\021\031\247\300\006\331\215\271\313\311X\301\"\345\307\300:\305\235\300\023\274\332A\303.\005\247yy\312@\351-\003\300\370\345\325\277>\266\357\277~\233\247\212\334Y\371\300H99\277m\031\242\300\367`\234A\235\0319\275W\346\202\300\311o\343\277\036\342\350?\tf\273\242\000\000\000\000\360\243\215\277\000\000\000\000R@w\300\252\250DA\235\0319\275\014<\215\277\236\336b\276\002p\276\300\000\000\000\000\000\000\000\000\232\2319@\026\363R\275;BJ\276\000\000\000\000\000\000\000\000\"\230>\256\'\003\003\277\340\350\010\300\000\000\000\000\000\000\000\000\336\335]@\000\000\000\000u5Z\220\000\000\000\000\000\000\000\000\000\000\000\000\311\234\034\277\254\2666\300\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000
\000\000r\2600\275\251\350\005\215\260\307\027>\322\215\303?\330\256^\232\206\014_\206!\177\334\300\201\221~\276\\3\260@\033\205*\246k\206\006\276\031\321\341\261%*@@\004\211\334?\357\027\206\247\026\242\300\206\365\274\220\301[\220!\300;.\200A\326+\264\277\347\020\014\277\226\316\220\275\212_\202@\222\333Q\300\357`\235\300Y\220\273\206>\324h\301\235\305\255\300l\013\321A\014k\006\300\345\205\016\277\256W$\277i\255\201@\023!+\301$8\202\301\000\000\000\000\205\234\212@\263\356\203\2768*\261A\251\"\001\300cI\200\277M\005\227\277H\273\366?k\213s\301\001,\236\301!|\330\274\345\375$A\242\240\301>\340\317\324A\017\n\317\300\217\320\222\277)\200>\300\366\317\366?\002Z\302\301\362\324!\301\367{\204\276=\316\246A\266m\001\301\221\221\364A\"\245\355\300\n\275\330\300 \023\227\277\336\3231A\251Y\340\301\246r`?E\247\305\276\370\205uA\312\341_\301s\272\363A\375\231\240\300_g\233\300J\202\344\300\205HbA\341D\360\301:\006\213A-1[\277\346\017\003A\031e\231\300\203\314OA\266\265\"\300\233)\000\301\014\021\020\300\231\243\030A\014\210\335\301U\355oA\3323\261\277\226\237\220\300%cb\300\3324\313A\276\233\336\300\242`Z\3014f\310\300\322\304\'AUX\362\301\r\031\003A\030\302\265\277V\222\250A\021Vj\300\305\356\263A\013\217\032\301\"\177\365\300\203\355]\300=\243\024A\362)\"\302N1\277@\301\222C\277V\252\250A\222\303V\301`\267\034B\366o\307\300T\226\367\276\243o\266\300f\223\010Awi\013\302\371\336\211A2\014t\275\273\006\300A\362\"\250\301\314\226\224A\345\252\203\300\025\025\005\300{\275\013\301\t#\207A(Y\007\302\206^\351@(Z\r\275\036\312wA%MS\301\360\253\262A:\243\377\300<\311\303\277\347\276\033\301\204\265\211Av\224\t\302\246\307!=h\232\315\273\247\362\254Ax\300\201\300\246\261\226A\337\024\231\300\220\037\247\300t\276\247\300\214:\'A\001\003\017\302\024\201\017\301\203\202\202\276\217\330\251Ae4\037\300\013>\370A\027+&\277QGE\300w\363\364\276/\262y@\272\346\343\301\270\340\267\275\000\377\376\276:b\212A\254\205\354\300\214\321\233AP\340\204\300\376\334\245\276i\204\276\277\206\354\257\27
6\200\036\204\301su\362@\234\233\233\276\263\221\225\277\035\206]\300\373p\241A\'-\226\300\2160\330\271mZ\331\277\004\342\244\277\205\206=\301\227\364\251@\302\300\300\273\312Z\324\277Y\342d\276\203\321\200ALW*\300\222\317\231\270\200SE\277\306\304D\276Fp\342\300\205\300\350?\000\000\000\000\353\272\316@\032VS\266.\004\033@\351\375\336\275\000\000\000\000>\337j\221\376A\033\260\036\214W\277\343L3=\000\000\000\000\324\335\023\276\276\202m\222A\227\206?\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000q>\027\276-\t\311\262\000\000\000\000\252Q\213\274\000\000\000\000\251\250(>\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\266\274<>\000\000\000\000\266\274<\276\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\367\354\354>\000\000\
000\000\370\354\354\276\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000_bb\277\000\000\000\000\000\000\000\000\371\234\036>\000\000\000\000 \273:?\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\317\320\240\300\000\000\000\000\000\000\000\000y\016n\277\000\000\000\000\236\222\276@\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000jN\256\300\000\000\000\000\000\000\000\000m\267\210?\000\000\000\000\221 \214@\000\000\000\000\000\000\000\000\000\000\000\000A=\037\254C\233\n\300\000\000\000\000\000\000\000\0008EH\276\000\000\000\000\227\037\027@\000\000\000\000\252e\332\264\027{\367\276\331\206*\257\326x\257\300\260\366\344>\000\000\000\000\275P\212@\013P\361\277\303HE@\000\000\000\000k)\240\264h\202\004\276M\3701\266\340$L\300*\t\200?\000\000\000\0005\355\031@\036\210_\276k<\007>\000\000\000\000\262\271T\277]\366M\233\036\033\265\266>D@\300Mq\021?\000\000\000\000\334\3220;N\032\016\277\315pt@\376\374\374\275\333\177-\277\002\251\033\276b\016X\265V\340\027\300\233<\007\276\233]\310\274\203W.A~\314\347\276\233\276\336\300\007\007\007\277[l\213\251\266[\312\276\037\230\343\242\\\006\"\301\006yD\276\330<\202\275+\253SA\201\200\200\276\3158\326\277\325\323S\277:\333\244\211i\016\371\274\221\220\220\274f\007\256\300\260&\203\235\342J\240\273\013\004\023A\306\305\305\276S\360\036\300\240\237\037\277\000\000\000\000\365\025T\242\020\020\020\277(\271z\300$\031\213\257\000\000\000\000\313\026\233@\306\330\330\276SJ.?7\016i\235\342\221d\277\272\213>\242\325\324\324\276\246\343R\300\375\277}\257\000\000\000\000A\353\002A\346\322\311\276\363\317K\300\354\352j\276\202\3649\300\304M?\221\010\004\242\232\364\3069\300\3508j\263\000\000\000\000\330i3Ax\214\311\2739l\245\300\377\375\375\276\257\360\367\274\007\222_\274ks!\252w\325\223\277\013v\267\276\023\357\204\206=\351#A\277\333\353\270\226\016\003\301\203\202\202\276\000\000\000\000\305(\t\277ks!\252\274\216\343\245;\014\341\277\023\357\204\206\326\030\003@ 
h\315\247\'\037\000?\356\253t\277\000\000\000\000\255\242g\276\331\245\333\211\035LF\277\355\346D\277\000\000\000\000\361\312\337?\262\177\351\234\3001z?\232\252D\300\000\000\000\000\356\343g\277\000\000\000\000\021d\037\300~Z\231\263\000\000\000\000$\375\236@\244x\321\224\276\032\300?\336\2413\277\000\000\000\000Y3\275\222\000\000\000\000\333\225\021\277\'\022\214\262\000\000\000\000XA\n@\375S\277\223\240\315c\277\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" - } - } - } -} -node { - name: "Variable_1" - op: "Const" - attr { - key: "dtype" - value { - type: DT_FLOAT - } - } - attr { - key: "value" - value { - tensor { - dtype: DT_FLOAT - tensor_shape { - dim { - size: 10 - } - } - tensor_content: "\372\214\372\301\212\364\261A\274V\372A\2253S\301x\360!\276\313\275\256B3G \301\017k\034B\363:\320\302\277\235\251\301" - } - } - } -} -node { - name: "MatMul" - op: "MatMul" - input: "Placeholder" - input: "Variable" - attr { - key: "T" - value { - type: DT_FLOAT - } - } - attr { - key: "transpose_a" - value { - b: false - } - } - attr { - key: "transpose_b" - value { - b: false - } - } -} -node { - name: "add" - op: "Add" - input: "MatMul" - input: "Variable_1" - attr { - key: "T" - value { - type: DT_FLOAT - } - } -} -node { - name: "Softmax" - op: "Softmax" - input: "add" - attr { - key: "T" - value { - type: DT_FLOAT - } - } -} -library { -} diff --git 
a/tests/TfMobileNet-Armnn/TfMobileNet-Armnn.cpp b/tests/TfMobileNet-Armnn/TfMobileNet-Armnn.cpp new file mode 100644 index 0000000..e1aebb6 --- /dev/null +++ b/tests/TfMobileNet-Armnn/TfMobileNet-Armnn.cpp @@ -0,0 +1,50 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// +#include "../InferenceTest.hpp" +#include "../MobileNetDatabase.hpp" +#include "armnnTfParser/ITfParser.hpp" + +int main(int argc, char* argv[]) +{ + std::vector imageSet = + { + {"Dog.jpg", 209}, + // top five predictions in tensorflow: + // ----------------------------------- + // 209:Labrador retriever 0.949995 + // 160:Rhodesian ridgeback 0.0270182 + // 208:golden retriever 0.0192866 + // 853:tennis ball 0.000470382 + // 239:Greater Swiss Mountain dog 0.000464451 + {"Cat.jpg", 283}, + // top five predictions in tensorflow: + // ----------------------------------- + // 283:tiger cat 0.579016 + // 286:Egyptian cat 0.319676 + // 282:tabby, tabby cat 0.0873346 + // 288:lynx, catamount 0.011163 + // 289:leopard, Panthera pardus 0.000856755 + {"shark.jpg", 3}, + // top five predictions in tensorflow: + // ----------------------------------- + // 3:great white shark, white shark, ... 0.996926 + // 4:tiger shark, Galeocerdo cuvieri 0.00270528 + // 149:killer whale, killer, orca, ... 
0.000121848 + // 395:sturgeon 7.78977e-05 + // 5:hammerhead, hammerhead shark 6.44127e-055 + }; + + armnn::TensorShape inputTensorShape({ 1, 224, 224, 3 }); + return armnn::test::ClassifierInferenceTestMain( + argc, argv, "mobilenet_v1_1.0_224_fp32.pb", true, "input", "output", { 0, 1, 2 }, + [&imageSet](const char* dataDir) { + return MobileNetDatabase( + dataDir, + 224, + 224, + imageSet); + }, + &inputTensorShape); +} diff --git a/tests/TfMobileNet-Armnn/Validation.txt b/tests/TfMobileNet-Armnn/Validation.txt new file mode 100644 index 0000000..94a11bd --- /dev/null +++ b/tests/TfMobileNet-Armnn/Validation.txt @@ -0,0 +1,201 @@ +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 +209 +283 +3 \ No newline at end of file diff --git a/tests/TfMobileNet-Armnn/labels.txt b/tests/TfMobileNet-Armnn/labels.txt new file mode 100644 index 0000000..d74ff55 --- /dev/null +++ b/tests/TfMobileNet-Armnn/labels.txt @@ -0,0 +1,1001 @@ +0:background +1:tench, Tinca tinca +2:goldfish, Carassius auratus +3:great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias +4:tiger shark, Galeocerdo cuvieri +5:hammerhead, hammerhead shark +6:electric ray, crampfish, numbfish, 
torpedo +7:stingray +8:cock +9:hen +10:ostrich, Struthio camelus +11:brambling, Fringilla montifringilla +12:goldfinch, Carduelis carduelis +13:house finch, linnet, Carpodacus mexicanus +14:junco, snowbird +15:indigo bunting, indigo finch, indigo bird, Passerina cyanea +16:robin, American robin, Turdus migratorius +17:bulbul +18:jay +19:magpie +20:chickadee +21:water ouzel, dipper +22:kite +23:bald eagle, American eagle, Haliaeetus leucocephalus +24:vulture +25:great grey owl, great gray owl, Strix nebulosa +26:European fire salamander, Salamandra salamandra +27:common newt, Triturus vulgaris +28:eft +29:spotted salamander, Ambystoma maculatum +30:axolotl, mud puppy, Ambystoma mexicanum +31:bullfrog, Rana catesbeiana +32:tree frog, tree-frog +33:tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui +34:loggerhead, loggerhead turtle, Caretta caretta +35:leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea +36:mud turtle +37:terrapin +38:box turtle, box tortoise +39:banded gecko +40:common iguana, iguana, Iguana iguana +41:American chameleon, anole, Anolis carolinensis +42:whiptail, whiptail lizard +43:agama +44:frilled lizard, Chlamydosaurus kingi +45:alligator lizard +46:Gila monster, Heloderma suspectum +47:green lizard, Lacerta viridis +48:African chameleon, Chamaeleo chamaeleon +49:Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis +50:African crocodile, Nile crocodile, Crocodylus niloticus +51:American alligator, Alligator mississipiensis +52:triceratops +53:thunder snake, worm snake, Carphophis amoenus +54:ringneck snake, ring-necked snake, ring snake +55:hognose snake, puff adder, sand viper +56:green snake, grass snake +57:king snake, kingsnake +58:garter snake, grass snake +59:water snake +60:vine snake +61:night snake, Hypsiglena torquata +62:boa constrictor, Constrictor constrictor +63:rock python, rock snake, Python sebae +64:Indian cobra, Naja naja +65:green mamba +66:sea snake +67:horned viper, 
cerastes, sand viper, horned asp, Cerastes cornutus +68:diamondback, diamondback rattlesnake, Crotalus adamanteus +69:sidewinder, horned rattlesnake, Crotalus cerastes +70:trilobite +71:harvestman, daddy longlegs, Phalangium opilio +72:scorpion +73:black and gold garden spider, Argiope aurantia +74:barn spider, Araneus cavaticus +75:garden spider, Aranea diademata +76:black widow, Latrodectus mactans +77:tarantula +78:wolf spider, hunting spider +79:tick +80:centipede +81:black grouse +82:ptarmigan +83:ruffed grouse, partridge, Bonasa umbellus +84:prairie chicken, prairie grouse, prairie fowl +85:peacock +86:quail +87:partridge +88:African grey, African gray, Psittacus erithacus +89:macaw +90:sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita +91:lorikeet +92:coucal +93:bee eater +94:hornbill +95:hummingbird +96:jacamar +97:toucan +98:drake +99:red-breasted merganser, Mergus serrator +100:goose +101:black swan, Cygnus atratus +102:tusker +103:echidna, spiny anteater, anteater +104:platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus +105:wallaby, brush kangaroo +106:koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus +107:wombat +108:jellyfish +109:sea anemone, anemone +110:brain coral +111:flatworm, platyhelminth +112:nematode, nematode worm, roundworm +113:conch +114:snail +115:slug +116:sea slug, nudibranch +117:chiton, coat-of-mail shell, sea cradle, polyplacophore +118:chambered nautilus, pearly nautilus, nautilus +119:Dungeness crab, Cancer magister +120:rock crab, Cancer irroratus +121:fiddler crab +122:king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica +123:American lobster, Northern lobster, Maine lobster, Homarus americanus +124:spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish +125:crayfish, crawfish, crawdad, crawdaddy +126:hermit crab +127:isopod +128:white stork, Ciconia ciconia +129:black stork, Ciconia nigra +130:spoonbill 
+131:flamingo +132:little blue heron, Egretta caerulea +133:American egret, great white heron, Egretta albus +134:bittern +135:crane +136:limpkin, Aramus pictus +137:European gallinule, Porphyrio porphyrio +138:American coot, marsh hen, mud hen, water hen, Fulica americana +139:bustard +140:ruddy turnstone, Arenaria interpres +141:red-backed sandpiper, dunlin, Erolia alpina +142:redshank, Tringa totanus +143:dowitcher +144:oystercatcher, oyster catcher +145:pelican +146:king penguin, Aptenodytes patagonica +147:albatross, mollymawk +148:grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus +149:killer whale, killer, orca, grampus, sea wolf, Orcinus orca +150:dugong, Dugong dugon +151:sea lion +152:Chihuahua +153:Japanese spaniel +154:Maltese dog, Maltese terrier, Maltese +155:Pekinese, Pekingese, Peke +156:Shih-Tzu +157:Blenheim spaniel +158:papillon +159:toy terrier +160:Rhodesian ridgeback +161:Afghan hound, Afghan +162:basset, basset hound +163:beagle +164:bloodhound, sleuthhound +165:bluetick +166:black-and-tan coonhound +167:Walker hound, Walker foxhound +168:English foxhound +169:redbone +170:borzoi, Russian wolfhound +171:Irish wolfhound +172:Italian greyhound +173:whippet +174:Ibizan hound, Ibizan Podenco +175:Norwegian elkhound, elkhound +176:otterhound, otter hound +177:Saluki, gazelle hound +178:Scottish deerhound, deerhound +179:Weimaraner +180:Staffordshire bullterrier, Staffordshire bull terrier +181:American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier +182:Bedlington terrier +183:Border terrier +184:Kerry blue terrier +185:Irish terrier +186:Norfolk terrier +187:Norwich terrier +188:Yorkshire terrier +189:wire-haired fox terrier +190:Lakeland terrier +191:Sealyham terrier, Sealyham +192:Airedale, Airedale terrier +193:cairn, cairn terrier +194:Australian terrier +195:Dandie Dinmont, Dandie Dinmont terrier +196:Boston bull, Boston terrier +197:miniature schnauzer +198:giant 
schnauzer +199:standard schnauzer +200:Scotch terrier, Scottish terrier, Scottie +201:Tibetan terrier, chrysanthemum dog +202:silky terrier, Sydney silky +203:soft-coated wheaten terrier +204:West Highland white terrier +205:Lhasa, Lhasa apso +206:flat-coated retriever +207:curly-coated retriever +208:golden retriever +209:Labrador retriever +210:Chesapeake Bay retriever +211:German short-haired pointer +212:vizsla, Hungarian pointer +213:English setter +214:Irish setter, red setter +215:Gordon setter +216:Brittany spaniel +217:clumber, clumber spaniel +218:English springer, English springer spaniel +219:Welsh springer spaniel +220:cocker spaniel, English cocker spaniel, cocker +221:Sussex spaniel +222:Irish water spaniel +223:kuvasz +224:schipperke +225:groenendael +226:malinois +227:briard +228:kelpie +229:komondor +230:Old English sheepdog, bobtail +231:Shetland sheepdog, Shetland sheep dog, Shetland +232:collie +233:Border collie +234:Bouvier des Flandres, Bouviers des Flandres +235:Rottweiler +236:German shepherd, German shepherd dog, German police dog, alsatian +237:Doberman, Doberman pinscher +238:miniature pinscher +239:Greater Swiss Mountain dog +240:Bernese mountain dog +241:Appenzeller +242:EntleBucher +243:boxer +244:bull mastiff +245:Tibetan mastiff +246:French bulldog +247:Great Dane +248:Saint Bernard, St Bernard +249:Eskimo dog, husky +250:malamute, malemute, Alaskan malamute +251:Siberian husky +252:dalmatian, coach dog, carriage dog +253:affenpinscher, monkey pinscher, monkey dog +254:basenji +255:pug, pug-dog +256:Leonberg +257:Newfoundland, Newfoundland dog +258:Great Pyrenees +259:Samoyed, Samoyede +260:Pomeranian +261:chow, chow chow +262:keeshond +263:Brabancon griffon +264:Pembroke, Pembroke Welsh corgi +265:Cardigan, Cardigan Welsh corgi +266:toy poodle +267:miniature poodle +268:standard poodle +269:Mexican hairless +270:timber wolf, grey wolf, gray wolf, Canis lupus +271:white wolf, Arctic wolf, Canis lupus tundrarum +272:red wolf, maned 
wolf, Canis rufus, Canis niger +273:coyote, prairie wolf, brush wolf, Canis latrans +274:dingo, warrigal, warragal, Canis dingo +275:dhole, Cuon alpinus +276:African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus +277:hyena, hyaena +278:red fox, Vulpes vulpes +279:kit fox, Vulpes macrotis +280:Arctic fox, white fox, Alopex lagopus +281:grey fox, gray fox, Urocyon cinereoargenteus +282:tabby, tabby cat +283:tiger cat +284:Persian cat +285:Siamese cat, Siamese +286:Egyptian cat +287:cougar, puma, catamount, mountain lion, painter, panther, Felis concolor +288:lynx, catamount +289:leopard, Panthera pardus +290:snow leopard, ounce, Panthera uncia +291:jaguar, panther, Panthera onca, Felis onca +292:lion, king of beasts, Panthera leo +293:tiger, Panthera tigris +294:cheetah, chetah, Acinonyx jubatus +295:brown bear, bruin, Ursus arctos +296:American black bear, black bear, Ursus americanus, Euarctos americanus +297:ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus +298:sloth bear, Melursus ursinus, Ursus ursinus +299:mongoose +300:meerkat, mierkat +301:tiger beetle +302:ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle +303:ground beetle, carabid beetle +304:long-horned beetle, longicorn, longicorn beetle +305:leaf beetle, chrysomelid +306:dung beetle +307:rhinoceros beetle +308:weevil +309:fly +310:bee +311:ant, emmet, pismire +312:grasshopper, hopper +313:cricket +314:walking stick, walkingstick, stick insect +315:cockroach, roach +316:mantis, mantid +317:cicada, cicala +318:leafhopper +319:lacewing, lacewing fly +320:dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk +321:damselfly +322:admiral +323:ringlet, ringlet butterfly +324:monarch, monarch butterfly, milkweed butterfly, Danaus plexippus +325:cabbage butterfly +326:sulphur butterfly, sulfur butterfly +327:lycaenid, lycaenid butterfly +328:starfish, sea star +329:sea urchin +330:sea cucumber, holothurian 
+331:wood rabbit, cottontail, cottontail rabbit +332:hare +333:Angora, Angora rabbit +334:hamster +335:porcupine, hedgehog +336:fox squirrel, eastern fox squirrel, Sciurus niger +337:marmot +338:beaver +339:guinea pig, Cavia cobaya +340:sorrel +341:zebra +342:hog, pig, grunter, squealer, Sus scrofa +343:wild boar, boar, Sus scrofa +344:warthog +345:hippopotamus, hippo, river horse, Hippopotamus amphibius +346:ox +347:water buffalo, water ox, Asiatic buffalo, Bubalus bubalis +348:bison +349:ram, tup +350:bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis +351:ibex, Capra ibex +352:hartebeest +353:impala, Aepyceros melampus +354:gazelle +355:Arabian camel, dromedary, Camelus dromedarius +356:llama +357:weasel +358:mink +359:polecat, fitch, foulmart, foumart, Mustela putorius +360:black-footed ferret, ferret, Mustela nigripes +361:otter +362:skunk, polecat, wood pussy +363:badger +364:armadillo +365:three-toed sloth, ai, Bradypus tridactylus +366:orangutan, orang, orangutang, Pongo pygmaeus +367:gorilla, Gorilla gorilla +368:chimpanzee, chimp, Pan troglodytes +369:gibbon, Hylobates lar +370:siamang, Hylobates syndactylus, Symphalangus syndactylus +371:guenon, guenon monkey +372:patas, hussar monkey, Erythrocebus patas +373:baboon +374:macaque +375:langur +376:colobus, colobus monkey +377:proboscis monkey, Nasalis larvatus +378:marmoset +379:capuchin, ringtail, Cebus capucinus +380:howler monkey, howler +381:titi, titi monkey +382:spider monkey, Ateles geoffroyi +383:squirrel monkey, Saimiri sciureus +384:Madagascar cat, ring-tailed lemur, Lemur catta +385:indri, indris, Indri indri, Indri brevicaudatus +386:Indian elephant, Elephas maximus +387:African elephant, Loxodonta africana +388:lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens +389:giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca +390:barracouta, snoek +391:eel +392:coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus 
kisutch +393:rock beauty, Holocanthus tricolor +394:anemone fish +395:sturgeon +396:gar, garfish, garpike, billfish, Lepisosteus osseus +397:lionfish +398:puffer, pufferfish, blowfish, globefish +399:abacus +400:abaya +401:academic gown, academic robe, judge's robe +402:accordion, piano accordion, squeeze box +403:acoustic guitar +404:aircraft carrier, carrier, flattop, attack aircraft carrier +405:airliner +406:airship, dirigible +407:altar +408:ambulance +409:amphibian, amphibious vehicle +410:analog clock +411:apiary, bee house +412:apron +413:ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin +414:assault rifle, assault gun +415:backpack, back pack, knapsack, packsack, rucksack, haversack +416:bakery, bakeshop, bakehouse +417:balance beam, beam +418:balloon +419:ballpoint, ballpoint pen, ballpen, Biro +420:Band Aid +421:banjo +422:bannister, banister, balustrade, balusters, handrail +423:barbell +424:barber chair +425:barbershop +426:barn +427:barometer +428:barrel, cask +429:barrow, garden cart, lawn cart, wheelbarrow +430:baseball +431:basketball +432:bassinet +433:bassoon +434:bathing cap, swimming cap +435:bath towel +436:bathtub, bathing tub, bath, tub +437:beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon +438:beacon, lighthouse, beacon light, pharos +439:beaker +440:bearskin, busby, shako +441:beer bottle +442:beer glass +443:bell cote, bell cot +444:bib +445:bicycle-built-for-two, tandem bicycle, tandem +446:bikini, two-piece +447:binder, ring-binder +448:binoculars, field glasses, opera glasses +449:birdhouse +450:boathouse +451:bobsled, bobsleigh, bob +452:bolo tie, bolo, bola tie, bola +453:bonnet, poke bonnet +454:bookcase +455:bookshop, bookstore, bookstall +456:bottlecap +457:bow +458:bow tie, bow-tie, bowtie +459:brass, memorial tablet, plaque +460:brassiere, bra, bandeau +461:breakwater, groin, groyne, mole, bulwark, seawall, jetty +462:breastplate, aegis, egis 
+463:broom +464:bucket, pail +465:buckle +466:bulletproof vest +467:bullet train, bullet +468:butcher shop, meat market +469:cab, hack, taxi, taxicab +470:caldron, cauldron +471:candle, taper, wax light +472:cannon +473:canoe +474:can opener, tin opener +475:cardigan +476:car mirror +477:carousel, carrousel, merry-go-round, roundabout, whirligig +478:carpenter's kit, tool kit +479:carton +480:car wheel +481:cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM +482:cassette +483:cassette player +484:castle +485:catamaran +486:CD player +487:cello, violoncello +488:cellular telephone, cellular phone, cellphone, cell, mobile phone +489:chain +490:chainlink fence +491:chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour +492:chain saw, chainsaw +493:chest +494:chiffonier, commode +495:chime, bell, gong +496:china cabinet, china closet +497:Christmas stocking +498:church, church building +499:cinema, movie theater, movie theatre, movie house, picture palace +500:cleaver, meat cleaver, chopper +501:cliff dwelling +502:cloak +503:clog, geta, patten, sabot +504:cocktail shaker +505:coffee mug +506:coffeepot +507:coil, spiral, volute, whorl, helix +508:combination lock +509:computer keyboard, keypad +510:confectionery, confectionary, candy store +511:container ship, containership, container vessel +512:convertible +513:corkscrew, bottle screw +514:cornet, horn, trumpet, trump +515:cowboy boot +516:cowboy hat, ten-gallon hat +517:cradle +518:crane +519:crash helmet +520:crate +521:crib, cot +522:Crock Pot +523:croquet ball +524:crutch +525:cuirass +526:dam, dike, dyke +527:desk +528:desktop computer +529:dial telephone, dial phone +530:diaper, nappy, napkin +531:digital clock +532:digital watch +533:dining table, board +534:dishrag, dishcloth +535:dishwasher, dish washer, dishwashing machine +536:disk brake, disc brake +537:dock, dockage, docking facility +538:dogsled, dog sled, 
dog sleigh +539:dome +540:doormat, welcome mat +541:drilling platform, offshore rig +542:drum, membranophone, tympan +543:drumstick +544:dumbbell +545:Dutch oven +546:electric fan, blower +547:electric guitar +548:electric locomotive +549:entertainment center +550:envelope +551:espresso maker +552:face powder +553:feather boa, boa +554:file, file cabinet, filing cabinet +555:fireboat +556:fire engine, fire truck +557:fire screen, fireguard +558:flagpole, flagstaff +559:flute, transverse flute +560:folding chair +561:football helmet +562:forklift +563:fountain +564:fountain pen +565:four-poster +566:freight car +567:French horn, horn +568:frying pan, frypan, skillet +569:fur coat +570:garbage truck, dustcart +571:gasmask, respirator, gas helmet +572:gas pump, gasoline pump, petrol pump, island dispenser +573:goblet +574:go-kart +575:golf ball +576:golfcart, golf cart +577:gondola +578:gong, tam-tam +579:gown +580:grand piano, grand +581:greenhouse, nursery, glasshouse +582:grille, radiator grille +583:grocery store, grocery, food market, market +584:guillotine +585:hair slide +586:hair spray +587:half track +588:hammer +589:hamper +590:hand blower, blow dryer, blow drier, hair dryer, hair drier +591:hand-held computer, hand-held microcomputer +592:handkerchief, hankie, hanky, hankey +593:hard disc, hard disk, fixed disk +594:harmonica, mouth organ, harp, mouth harp +595:harp +596:harvester, reaper +597:hatchet +598:holster +599:home theater, home theatre +600:honeycomb +601:hook, claw +602:hoopskirt, crinoline +603:horizontal bar, high bar +604:horse cart, horse-cart +605:hourglass +606:iPod +607:iron, smoothing iron +608:jack-o'-lantern +609:jean, blue jean, denim +610:jeep, landrover +611:jersey, T-shirt, tee shirt +612:jigsaw puzzle +613:jinrikisha, ricksha, rickshaw +614:joystick +615:kimono +616:knee pad +617:knot +618:lab coat, laboratory coat +619:ladle +620:lampshade, lamp shade +621:laptop, laptop computer +622:lawn mower, mower +623:lens cap, lens cover 
+624:letter opener, paper knife, paperknife +625:library +626:lifeboat +627:lighter, light, igniter, ignitor +628:limousine, limo +629:liner, ocean liner +630:lipstick, lip rouge +631:Loafer +632:lotion +633:loudspeaker, speaker, speaker unit, loudspeaker system, speaker system +634:loupe, jeweler's loupe +635:lumbermill, sawmill +636:magnetic compass +637:mailbag, postbag +638:mailbox, letter box +639:maillot +640:maillot, tank suit +641:manhole cover +642:maraca +643:marimba, xylophone +644:mask +645:matchstick +646:maypole +647:maze, labyrinth +648:measuring cup +649:medicine chest, medicine cabinet +650:megalith, megalithic structure +651:microphone, mike +652:microwave, microwave oven +653:military uniform +654:milk can +655:minibus +656:miniskirt, mini +657:minivan +658:missile +659:mitten +660:mixing bowl +661:mobile home, manufactured home +662:Model T +663:modem +664:monastery +665:monitor +666:moped +667:mortar +668:mortarboard +669:mosque +670:mosquito net +671:motor scooter, scooter +672:mountain bike, all-terrain bike, off-roader +673:mountain tent +674:mouse, computer mouse +675:mousetrap +676:moving van +677:muzzle +678:nail +679:neck brace +680:necklace +681:nipple +682:notebook, notebook computer +683:obelisk +684:oboe, hautboy, hautbois +685:ocarina, sweet potato +686:odometer, hodometer, mileometer, milometer +687:oil filter +688:organ, pipe organ +689:oscilloscope, scope, cathode-ray oscilloscope, CRO +690:overskirt +691:oxcart +692:oxygen mask +693:packet +694:paddle, boat paddle +695:paddlewheel, paddle wheel +696:padlock +697:paintbrush +698:pajama, pyjama, pj's, jammies +699:palace +700:panpipe, pandean pipe, syrinx +701:paper towel +702:parachute, chute +703:parallel bars, bars +704:park bench +705:parking meter +706:passenger car, coach, carriage +707:patio, terrace +708:pay-phone, pay-station +709:pedestal, plinth, footstall +710:pencil box, pencil case +711:pencil sharpener +712:perfume, essence +713:Petri dish +714:photocopier 
+715:pick, plectrum, plectron +716:pickelhaube +717:picket fence, paling +718:pickup, pickup truck +719:pier +720:piggy bank, penny bank +721:pill bottle +722:pillow +723:ping-pong ball +724:pinwheel +725:pirate, pirate ship +726:pitcher, ewer +727:plane, carpenter's plane, woodworking plane +728:planetarium +729:plastic bag +730:plate rack +731:plow, plough +732:plunger, plumber's helper +733:Polaroid camera, Polaroid Land camera +734:pole +735:police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria +736:poncho +737:pool table, billiard table, snooker table +738:pop bottle, soda bottle +739:pot, flowerpot +740:potter's wheel +741:power drill +742:prayer rug, prayer mat +743:printer +744:prison, prison house +745:projectile, missile +746:projector +747:puck, hockey puck +748:punching bag, punch bag, punching ball, punchball +749:purse +750:quill, quill pen +751:quilt, comforter, comfort, puff +752:racer, race car, racing car +753:racket, racquet +754:radiator +755:radio, wireless +756:radio telescope, radio reflector +757:rain barrel +758:recreational vehicle, RV, R.V. 
+759:reel +760:reflex camera +761:refrigerator, icebox +762:remote control, remote +763:restaurant, eating house, eating place, eatery +764:revolver, six-gun, six-shooter +765:rifle +766:rocking chair, rocker +767:rotisserie +768:rubber eraser, rubber, pencil eraser +769:rugby ball +770:rule, ruler +771:running shoe +772:safe +773:safety pin +774:saltshaker, salt shaker +775:sandal +776:sarong +777:sax, saxophone +778:scabbard +779:scale, weighing machine +780:school bus +781:schooner +782:scoreboard +783:screen, CRT screen +784:screw +785:screwdriver +786:seat belt, seatbelt +787:sewing machine +788:shield, buckler +789:shoe shop, shoe-shop, shoe store +790:shoji +791:shopping basket +792:shopping cart +793:shovel +794:shower cap +795:shower curtain +796:ski +797:ski mask +798:sleeping bag +799:slide rule, slipstick +800:sliding door +801:slot, one-armed bandit +802:snorkel +803:snowmobile +804:snowplow, snowplough +805:soap dispenser +806:soccer ball +807:sock +808:solar dish, solar collector, solar furnace +809:sombrero +810:soup bowl +811:space bar +812:space heater +813:space shuttle +814:spatula +815:speedboat +816:spider web, spider's web +817:spindle +818:sports car, sport car +819:spotlight, spot +820:stage +821:steam locomotive +822:steel arch bridge +823:steel drum +824:stethoscope +825:stole +826:stone wall +827:stopwatch, stop watch +828:stove +829:strainer +830:streetcar, tram, tramcar, trolley, trolley car +831:stretcher +832:studio couch, day bed +833:stupa, tope +834:submarine, pigboat, sub, U-boat +835:suit, suit of clothes +836:sundial +837:sunglass +838:sunglasses, dark glasses, shades +839:sunscreen, sunblock, sun blocker +840:suspension bridge +841:swab, swob, mop +842:sweatshirt +843:swimming trunks, bathing trunks +844:swing +845:switch, electric switch, electrical switch +846:syringe +847:table lamp +848:tank, army tank, armored combat vehicle, armoured combat vehicle +849:tape player +850:teapot +851:teddy, teddy bear +852:television, 
television system +853:tennis ball +854:thatch, thatched roof +855:theater curtain, theatre curtain +856:thimble +857:thresher, thrasher, threshing machine +858:throne +859:tile roof +860:toaster +861:tobacco shop, tobacconist shop, tobacconist +862:toilet seat +863:torch +864:totem pole +865:tow truck, tow car, wrecker +866:toyshop +867:tractor +868:trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi +869:tray +870:trench coat +871:tricycle, trike, velocipede +872:trimaran +873:tripod +874:triumphal arch +875:trolleybus, trolley coach, trackless trolley +876:trombone +877:tub, vat +878:turnstile +879:typewriter keyboard +880:umbrella +881:unicycle, monocycle +882:upright, upright piano +883:vacuum, vacuum cleaner +884:vase +885:vault +886:velvet +887:vending machine +888:vestment +889:viaduct +890:violin, fiddle +891:volleyball +892:waffle iron +893:wall clock +894:wallet, billfold, notecase, pocketbook +895:wardrobe, closet, press +896:warplane, military plane +897:washbasin, handbasin, washbowl, lavabo, wash-hand basin +898:washer, automatic washer, washing machine +899:water bottle +900:water jug +901:water tower +902:whiskey jug +903:whistle +904:wig +905:window screen +906:window shade +907:Windsor tie +908:wine bottle +909:wing +910:wok +911:wooden spoon +912:wool, woolen, woollen +913:worm fence, snake fence, snake-rail fence, Virginia fence +914:wreck +915:yawl +916:yurt +917:web site, website, internet site, site +918:comic book +919:crossword puzzle, crossword +920:street sign +921:traffic light, traffic signal, stoplight +922:book jacket, dust cover, dust jacket, dust wrapper +923:menu +924:plate +925:guacamole +926:consomme +927:hot pot, hotpot +928:trifle +929:ice cream, icecream +930:ice lolly, lolly, lollipop, popsicle +931:French loaf +932:bagel, beigel +933:pretzel +934:cheeseburger +935:hotdog, hot dog, red hot +936:mashed potato +937:head cabbage +938:broccoli +939:cauliflower +940:zucchini, courgette +941:spaghetti squash 
+942:acorn squash +943:butternut squash +944:cucumber, cuke +945:artichoke, globe artichoke +946:bell pepper +947:cardoon +948:mushroom +949:Granny Smith +950:strawberry +951:orange +952:lemon +953:fig +954:pineapple, ananas +955:banana +956:jackfruit, jak, jack +957:custard apple +958:pomegranate +959:hay +960:carbonara +961:chocolate sauce, chocolate syrup +962:dough +963:meat loaf, meatloaf +964:pizza, pizza pie +965:potpie +966:burrito +967:red wine +968:espresso +969:cup +970:eggnog +971:alp +972:bubble +973:cliff, drop, drop-off +974:coral reef +975:geyser +976:lakeside, lakeshore +977:promontory, headland, head, foreland +978:sandbar, sand bar +979:seashore, coast, seacoast, sea-coast +980:valley, vale +981:volcano +982:ballplayer, baseball player +983:groom, bridegroom +984:scuba diver +985:rapeseed +986:daisy +987:yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum +988:corn +989:acorn +990:hip, rose hip, rosehip +991:buckeye, horse chestnut, conker +992:coral fungus +993:agaric +994:gyromitra +995:stinkhorn, carrion fungus +996:earthstar +997:hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa +998:bolete +999:ear, spike, capitulum +1000:toilet tissue, toilet paper, bathroom tissue -- 2.7.4