src/armnnUtils/FloatingPointConverter.cpp \
src/armnnUtils/HeapProfiling.cpp \
src/armnnUtils/LeakChecking.cpp \
- src/armnnUtils/Logging.cpp \
src/armnnUtils/ParserHelper.cpp \
src/armnnUtils/Permute.cpp \
src/armnnUtils/TensorUtils.cpp \
include/armnnUtils/TensorUtils.hpp
src/armnnUtils/GraphTopologicalSort.hpp
src/armnnUtils/Half.hpp
- src/armnnUtils/Logging.hpp
- src/armnnUtils/Logging.cpp
src/armnnUtils/Permute.cpp
src/armnnUtils/DataLayoutIndexed.cpp
src/armnnUtils/DotSerializer.cpp
target_include_directories(armnnCaffeParser PRIVATE src/armnnUtils)
- target_link_libraries(armnnCaffeParser ${Boost_LOG_LIBRARY} ${Boost_THREAD_LIBRARY} ${Boost_SYSTEM_LIBRARY})
+ target_link_libraries(armnnCaffeParser ${Boost_THREAD_LIBRARY} ${Boost_SYSTEM_LIBRARY})
target_link_libraries(armnnCaffeParser armnn)
target_link_libraries(armnnCaffeParser ${PROTOBUF_LIBRARIES})
${Boost_SYSTEM_LIBRARY}
${Boost_PROGRAM_OPTIONS_LIBRARY}
${Boost_FILESYSTEM_LIBRARY}
- ${Boost_LOG_LIBRARY}
${Boost_THREAD_LIBRARY} )
add_executable_ex(ArmnnQuantizer
${Boost_SYSTEM_LIBRARY}
${Boost_PROGRAM_OPTIONS_LIBRARY}
${Boost_FILESYSTEM_LIBRARY}
- ${Boost_LOG_LIBRARY}
${Boost_THREAD_LIBRARY} )
target_link_libraries(ArmnnQuantizer
include/armnn/IRuntime.hpp
include/armnn/LayerSupport.hpp
include/armnn/LayerVisitorBase.hpp
+ include/armnn/Logging.hpp
include/armnn/LstmParams.hpp
include/armnn/MemorySources.hpp
include/armnn/NetworkFwd.hpp
install(DIRECTORY include/ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
-target_link_libraries(armnn ${Boost_LOG_LIBRARY} ${Boost_THREAD_LIBRARY}
+target_link_libraries(armnn ${Boost_THREAD_LIBRARY}
${Boost_SYSTEM_LIBRARY} ${Boost_FILESYSTEM_LIBRARY})
if(ARMCOMPUTENEON OR ARMCOMPUTECL)
#include "Exceptions.hpp"
#include "INetwork.hpp"
#include "IRuntime.hpp"
+#include "Logging.hpp"
#include "LstmParams.hpp"
#include "Optional.hpp"
#include "QuantizedLstmParams.hpp"
--- /dev/null
+//
+// Copyright © 2019 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#pragma once
+
+#include <iostream>
+#include <memory>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "Utils.hpp"
+
+#if defined(_MSC_VER)
+#include <Windows.h>
+#endif
+
+#if defined(__ANDROID__)
+#include <android/log.h>
+#endif
+
+#include <boost/assert.hpp>
+
+
+namespace armnn
+{
+
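+/// Returns the human-readable name of a severity level; used as the prefix of every log record.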
+inline std::string LevelToString(LogSeverity level)
+{
+ switch(level)
+ {
+ case LogSeverity::Trace:
+ return "Trace";
+ case LogSeverity::Debug:
+ return "Debug";
+ case LogSeverity::Info:
+ return "Info";
+ case LogSeverity::Warning:
+ return "Warning";
+ case LogSeverity::Error:
+ return "Error";
+ case LogSeverity::Fatal:
+ return "Fatal";
+ default:
+ return "Log";
+ }
+}
+
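+/// Abstract destination for log messages; each completed record is passed to Consume()
+/// on every sink registered with the logger.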
+class LogSink
+{
+public:
+ virtual ~LogSink() {}
+
+ virtual void Consume(const std::string& s) = 0;
+};
+
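+/// Writes records to std::cout wrapped in an ANSI colour code chosen from the severity
+/// the sink was constructed with.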
+class StandardOutputColourSink : public LogSink
+{
+public:
+ StandardOutputColourSink(LogSeverity level = LogSeverity::Info)
+ : m_Level(level)
+ {
+ }
+
+ void Consume(const std::string& s) override
+ {
+ std::cout << GetColour(m_Level) << s << ResetColour() << std::endl;
+ }
+
+private:
+ std::string ResetColour()
+ {
+ return "\033[0m";
+ }
+
+ std::string GetColour(LogSeverity level)
+ {
+ switch(level)
+ {
+ case LogSeverity::Trace:
+ return "\033[35m";
+ case LogSeverity::Debug:
+ return "\033[32m";
+ case LogSeverity::Info:
+ return "\033[0m";
+ case LogSeverity::Warning:
+ return "\033[33m";
+ case LogSeverity::Error:
+ return "\033[31m";
+ case LogSeverity::Fatal:
+ return "\033[41;30m";
+
+ default:
+ return "\033[0m";
+ }
+ }
+ LogSeverity m_Level;
+};
+
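+/// Writes records to std::cout without any colouring.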
+class StandardOutputSink : public LogSink
+{
+public:
+ void Consume(const std::string& s) override
+ {
+ std::cout << s << std::endl;
+ }
+};
+
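+/// Forwards records to the platform debug output: OutputDebugString on MSVC builds,
+/// the Android log on Android builds, and a no-op elsewhere.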
+class DebugOutputSink : public LogSink
+{
+public:
+ void Consume(const std::string& s) override
+ {
+#if defined(_MSC_VER)
+ OutputDebugString(s.c_str());
+ OutputDebugString("\n");
+#endif
+#if defined(__ANDROID__)
+ __android_log_write(ANDROID_LOG_DEBUG, "armnn", s.c_str());
+#endif
+ }
+};
+
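+/// Collects a single log record via operator<< and, if the logger is enabled, flushes it
+/// to every registered sink when the record goes out of scope.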
+struct ScopedRecord
+{
+ ScopedRecord(const std::vector<std::shared_ptr<LogSink>>& sinks, LogSeverity level, bool enabled)
+ : m_LogSinks(sinks)
+ , m_Enabled(enabled)
+ {
+ if (enabled)
+ {
+ m_Os << LevelToString(level) << ": ";
+ }
+ }
+
+ ~ScopedRecord()
+ {
+ if (m_Enabled)
+ {
+ for (auto sink : m_LogSinks)
+ {
+ if (sink)
+ {
+ sink->Consume(m_Os.str());
+ }
+ }
+ }
+ }
+
+ ScopedRecord(const ScopedRecord&) = delete;
+ ScopedRecord& operator=(const ScopedRecord&) = delete;
+ ScopedRecord(ScopedRecord&& other) = default;
+ ScopedRecord& operator=(ScopedRecord&&) = default;
+
+ template<typename Streamable>
+ ScopedRecord& operator<<(const Streamable& s)
+ {
+ if (m_Enabled)
+ {
+ m_Os << s;
+ }
+ return (*this);
+ }
+
+private:
+ const std::vector<std::shared_ptr<LogSink>>& m_LogSinks;
+ std::ostringstream m_Os;
+ bool m_Enabled;
+};
+
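+/// Per-severity singleton that owns the sinks for that level; ARMNN_LOG fetches the
+/// matching instance via Get() and starts a new ScopedRecord on it.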
+template<LogSeverity Level>
+class SimpleLogger
+{
+public:
+ SimpleLogger()
+ : m_Sinks{std::make_shared<StandardOutputSink>()}
+ , m_Enable(true)
+ {
+ }
+
+ static SimpleLogger& Get()
+ {
+ static SimpleLogger<Level> logger;
+ return logger;
+ }
+
+ void Enable(bool enable = true)
+ {
+ m_Enable = enable;
+ }
+
+ ScopedRecord StartNewRecord()
+ {
+ ScopedRecord record(m_Sinks, Level, m_Enable);
+ return record;
+ }
+
+ void RemoveAllSinks()
+ {
+ m_Sinks.clear();
+ }
+
+ void AddSink(std::shared_ptr<LogSink> sink)
+ {
+ m_Sinks.push_back(sink);
+ }
+private:
+ std::vector<std::shared_ptr<LogSink>> m_Sinks;
+ bool m_Enable;
+};
+
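+/// Enables the loggers for the given severity and all more severe levels, and disables
+/// everything below it.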
+inline void SetLogFilter(LogSeverity level)
+{
+ SimpleLogger<LogSeverity::Trace>::Get().Enable(false);
+ SimpleLogger<LogSeverity::Debug>::Get().Enable(false);
+ SimpleLogger<LogSeverity::Info>::Get().Enable(false);
+ SimpleLogger<LogSeverity::Warning>::Get().Enable(false);
+ SimpleLogger<LogSeverity::Error>::Get().Enable(false);
+ SimpleLogger<LogSeverity::Fatal>::Get().Enable(false);
+ switch (level)
+ {
+ case LogSeverity::Trace:
+ SimpleLogger<LogSeverity::Trace>::Get().Enable(true);
+ ARMNN_FALLTHROUGH;
+ case LogSeverity::Debug:
+ SimpleLogger<LogSeverity::Debug>::Get().Enable(true);
+ ARMNN_FALLTHROUGH;
+ case LogSeverity::Info:
+ SimpleLogger<LogSeverity::Info>::Get().Enable(true);
+ ARMNN_FALLTHROUGH;
+ case LogSeverity::Warning:
+ SimpleLogger<LogSeverity::Warning>::Get().Enable(true);
+ ARMNN_FALLTHROUGH;
+ case LogSeverity::Error:
+ SimpleLogger<LogSeverity::Error>::Get().Enable(true);
+ ARMNN_FALLTHROUGH;
+ case LogSeverity::Fatal:
+ SimpleLogger<LogSeverity::Fatal>::Get().Enable(true);
+ break;
+ default:
+ BOOST_ASSERT(false);
+ }
+}
+
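+/// Replaces the sinks of the logger for a single severity level.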
+template<LogSeverity Level>
+inline void SetLoggingSinks(bool standardOut, bool debugOut, bool coloured)
+{
+ SimpleLogger<Level>::Get().RemoveAllSinks();
+
+ if (standardOut)
+ {
+ if (coloured)
+ {
+ SimpleLogger<Level>::Get().AddSink(
+ std::make_shared<StandardOutputColourSink>(Level));
+ }
+ else
+ {
+ SimpleLogger<Level>::Get().AddSink(
+ std::make_shared<StandardOutputSink>());
+ }
+ }
+
+ if (debugOut)
+ {
+ SimpleLogger<Level>::Get().AddSink(
+ std::make_shared<DebugOutputSink>());
+ }
+}
+
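+/// Applies SetLoggingSinks to every severity level.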
+inline void SetAllLoggingSinks(bool standardOut, bool debugOut, bool coloured)
+{
+ SetLoggingSinks<LogSeverity::Trace>(standardOut, debugOut, coloured);
+ SetLoggingSinks<LogSeverity::Debug>(standardOut, debugOut, coloured);
+ SetLoggingSinks<LogSeverity::Info>(standardOut, debugOut, coloured);
+ SetLoggingSinks<LogSeverity::Warning>(standardOut, debugOut, coloured);
+ SetLoggingSinks<LogSeverity::Error>(standardOut, debugOut, coloured);
+ SetLoggingSinks<LogSeverity::Fatal>(standardOut, debugOut, coloured);
+}
+
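+/// Lower-case severity names matching the boost::log::trivial spelling, so call sites can
+/// keep writing ARMNN_LOG(warning), ARMNN_LOG(error), etc.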
+enum class BoostLogSeverityMapping
+{
+ trace,
+ debug,
+ info,
+ warning,
+ error,
+ fatal
+};
+
+constexpr LogSeverity ConvertLogSeverity(BoostLogSeverityMapping severity)
+{
+ return static_cast<LogSeverity>(severity);
+}
+
+
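+/// Streams a message to the logger for the given severity, for example:
+///     ARMNN_LOG(warning) << "Failed to open file: " << fileName;
+/// The record is flushed to the configured sinks at the end of the full statement.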
+#define ARMNN_LOG(severity) \
+ armnn::SimpleLogger<ConvertLogSeverity(armnn::BoostLogSeverityMapping::severity)>::Get().StartNewRecord()
+
+} //namespace armnn
/// severity: All log messages that are at this severity level or higher will be printed, others will be ignored.
void ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity severity);
+
+#if defined(__clang__) && ((__clang_major__ > 3) || (__clang_major__ == 3 && __clang_minor__ >= 5))
+# define ARMNN_FALLTHROUGH [[clang::fallthrough]]
+#elif defined(__GNUC__) && (__GNUC__ >= 7)
+# define ARMNN_FALLTHROUGH __attribute__((fallthrough))
+#else
+# define ARMNN_FALLTHROUGH ((void)0)
+#endif
+
} // namespace armnn
// SPDX-License-Identifier: MIT
//
#include "armnn/Descriptors.hpp"
+#include "armnn/Logging.hpp"
#include <algorithm>
#include <array>
#include <vector>
#include <boost/format.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/numeric/conversion/cast.hpp>
namespace armnn
{
if (view >= m_NumViews)
{
- BOOST_LOG_TRIVIAL(error) << "OriginsDescriptor::SetViewOriginCoord: view argument:" << view <<
+ ARMNN_LOG(error) << "OriginsDescriptor::SetViewOriginCoord: view argument:" << view <<
" is out of range";
return Status::Failure;
}
if (coord >= m_NumDimensions)
{
- BOOST_LOG_TRIVIAL(error) << "OriginsDescriptor::SetViewOriginCoord: coord argument:" << coord <<
+ ARMNN_LOG(error) << "OriginsDescriptor::SetViewOriginCoord: coord argument:" << coord <<
" is out of range";
return Status::Failure;
}
{
if (!m_ViewSizes)
{
- BOOST_LOG_TRIVIAL(error) << "ViewsDescriptor::SetViewSize: invalid view sizes";
+ ARMNN_LOG(error) << "ViewsDescriptor::SetViewSize: invalid view sizes";
return Status::Failure;
}
if (view >= GetNumViews())
{
- BOOST_LOG_TRIVIAL(error) << "ViewsDescriptor::SetViewSize: view argument:" << view <<
+ ARMNN_LOG(error) << "ViewsDescriptor::SetViewSize: view argument:" << view <<
" is out of range";
return Status::Failure;
}
if (coord >= GetNumDimensions())
{
- BOOST_LOG_TRIVIAL(error) << "ViewsDescriptor::SetViewSize: coord argument:" << coord <<
+ ARMNN_LOG(error) << "ViewsDescriptor::SetViewSize: coord argument:" << coord <<
" is out of range";
return Status::Failure;
}
#include <armnn/TypesUtils.hpp>
#include <boost/polymorphic_cast.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/assert.hpp>
#include <boost/format.hpp>
{
if (m_Layers.empty())
{
- BOOST_LOG_TRIVIAL(info) << "\n Graph is empty.\n";
+ ARMNN_LOG(info) << "\n Graph is empty.\n";
return Status::Success;
}
- BOOST_LOG_TRIVIAL(info) << "\n";
- BOOST_LOG_TRIVIAL(info) << "Walking Pattern: \n";
+ ARMNN_LOG(info) << "\n";
+ ARMNN_LOG(info) << "Walking Pattern: \n";
for (auto&& it : TopologicalSort())
{
- BOOST_LOG_TRIVIAL(info) << it->GetName() << ":" << GetLayerTypeAsCString(it->GetType())
+ ARMNN_LOG(info) << it->GetName() << ":" << GetLayerTypeAsCString(it->GetType())
<< ":" << it->GetBackendId().Get();
}
- BOOST_LOG_TRIVIAL(info) << "\n\n";
+ ARMNN_LOG(info) << "\n\n";
return Status::Success;
}
#include <boost/cast.hpp>
#include <boost/format.hpp>
-#include <boost/log/trivial.hpp>
#include <numeric>
#include <boost/polymorphic_cast.hpp>
#include <boost/assert.hpp>
#include <boost/format.hpp>
-#include <boost/log/trivial.hpp>
namespace armnn
{
auto Fail = [&](const std::exception& error) -> std::unique_ptr<LoadedNetwork>
{
errorMessage = ToErrorMessage("An error occurred when preparing the network workloads: ", error);
- BOOST_LOG_TRIVIAL(error) << errorMessage;
+ ARMNN_LOG(error) << errorMessage;
return std::unique_ptr<LoadedNetwork>();
};
// Walk graph to determine the order of execution.
if (graph.GetNumLayers() < 2)
{
- BOOST_LOG_TRIVIAL(warning) << "IRuntime::EnqueueWorkload()::Less than two nodes in graph";
+ ARMNN_LOG(warning) << "IRuntime::EnqueueWorkload()::Less than two nodes in graph";
return Status::Failure;
}
auto Fail = [&](const std::exception& error)
{
- BOOST_LOG_TRIVIAL(error) << "An error occurred attempting to execute a workload: " << error.what();
+ ARMNN_LOG(error) << "An error occurred attempting to execute a workload: " << error.what();
success = false;
};
#include <boost/assert.hpp>
#include <boost/format.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/numeric/conversion/converter_policies.hpp>
#include <boost/cast.hpp>
{
std::stringstream fullErrorMessage;
fullErrorMessage << "ERROR: " << errorMessage;
- BOOST_LOG_TRIVIAL(warning) << fullErrorMessage.str();
+ ARMNN_LOG(warning) << fullErrorMessage.str();
if (errorMessages)
{
errorMessages.value().push_back(fullErrorMessage.str());
{
std::stringstream fullWarningMessage;
fullWarningMessage << "WARNING: " << warningMessage;
- BOOST_LOG_TRIVIAL(warning) << fullWarningMessage.str();
+ ARMNN_LOG(warning) << fullWarningMessage.str();
if (warningMessages)
{
warningMessages.value().push_back(fullWarningMessage.str());
ss << "Quantization parameters for Softmax layer (Scale: " <<
info.GetQuantizationScale() << " and Offset: " << info.GetQuantizationOffset() <<
") are incorrect and have been updated to Scale: 0.00390625 and Offset: 0";
- BOOST_LOG_TRIVIAL(warning) << ss.str();
+ ARMNN_LOG(warning) << ss.str();
info.SetQuantizationScale((1.0f /256.0f));
info.SetQuantizationOffset(0);
outputSlot.SetTensorInfo(info);
#include <iostream>
-#include <boost/log/trivial.hpp>
#include <boost/polymorphic_cast.hpp>
using namespace armnn;
if (!unloadOk)
{
- BOOST_LOG_TRIVIAL(warning) << "Runtime::UnloadNetwork(): failed to unload "
- "network with ID:" << networkId << " because BeforeUnloadNetwork failed";
+ ARMNN_LOG(warning) << "Runtime::UnloadNetwork(): failed to unload "
+ "network with ID:" << networkId << " because BeforeUnloadNetwork failed";
return Status::Failure;
}
if (m_LoadedNetworks.erase(networkId) == 0)
{
- BOOST_LOG_TRIVIAL(warning) << "WARNING: Runtime::UnloadNetwork(): " << networkId << " not found!";
+ ARMNN_LOG(warning) << "WARNING: Runtime::UnloadNetwork(): " << networkId << " not found!";
return Status::Failure;
}
}
context.second->AfterUnloadNetwork(networkId);
}
- BOOST_LOG_TRIVIAL(debug) << "Runtime::UnloadNetwork(): Unloaded network with ID: " << networkId;
+ ARMNN_LOG(debug) << "Runtime::UnloadNetwork(): Unloaded network with ID: " << networkId;
return Status::Success;
}
: m_NetworkIdCounter(0)
, m_DeviceSpec{BackendRegistryInstance().GetBackendIds()}
{
- BOOST_LOG_TRIVIAL(info) << "ArmNN v" << ARMNN_VERSION << "\n";
+ ARMNN_LOG(info) << "ArmNN v" << ARMNN_VERSION << "\n";
// pass configuration info to the profiling service
armnn::profiling::ProfilingService::Instance().ConfigureProfilingService(options.m_ProfilingOptions);
#include "armnn/TypesUtils.hpp"
#include <boost/assert.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/numeric/conversion/cast.hpp>
#include <sstream>
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
+#include "armnn/Logging.hpp"
#include "armnn/Utils.hpp"
-#include "Logging.hpp"
-
-#include <boost/log/core.hpp>
namespace armnn
{
void ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity severity)
{
- using armnnUtils::ConfigureLogging;
- ConfigureLogging(boost::log::core::get().get(), printToStandardOutput, printToDebugOutput, severity);
+ SetAllLoggingSinks(printToStandardOutput, printToDebugOutput, false);
+ SetLogFilter(severity);
}
// Defaults to logging completely disabled.
#include <boost/test/unit_test.hpp>
#include <boost/filesystem.hpp>
-#include <boost/log/core/core.hpp>
#include <boost/optional.hpp>
#include <boost/variant.hpp>
#include <iostream>
#include <boost/test/unit_test.hpp>
#include "UnitTests.hpp"
+#include <armnn/Logging.hpp>
+
+#include <boost/algorithm/string.hpp>
struct ConfigureLoggingFixture
{
BOOST_GLOBAL_FIXTURE(SetupDebugOutput);
-#endif // defined(_MSC_VER)
\ No newline at end of file
+#endif // defined(_MSC_VER)
+
+
+BOOST_AUTO_TEST_SUITE(LoggerSuite)
+
+BOOST_AUTO_TEST_CASE(LoggerTest)
+{
+ std::stringstream ss;
+
+ {
+ struct StreamRedirector
+ {
+ public:
+ StreamRedirector(std::ostream& stream, std::streambuf* newStreamBuffer)
+ : m_Stream(stream)
+ , m_BackupBuffer(m_Stream.rdbuf(newStreamBuffer))
+ {}
+ ~StreamRedirector() { m_Stream.rdbuf(m_BackupBuffer); }
+
+ private:
+ std::ostream& m_Stream;
+ std::streambuf* m_BackupBuffer;
+ };
+
+
+ StreamRedirector redirect(std::cout, ss.rdbuf());
+
+ using namespace armnn;
+ SetLogFilter(LogSeverity::Trace);
+ SetAllLoggingSinks(true, false, false);
+
+
+ ARMNN_LOG(trace) << "My trace message; " << -2;
+ ARMNN_LOG(debug) << "My debug message; " << -1;
+ ARMNN_LOG(info) << "My info message; " << 0;
+ ARMNN_LOG(warning) << "My warning message; " << 1;
+ ARMNN_LOG(error) << "My error message; " << 2;
+ ARMNN_LOG(fatal) << "My fatal message; " << 3;
+
+ SetLogFilter(LogSeverity::Fatal);
+
+ }
+
+ BOOST_CHECK(boost::contains(ss.str(), "Trace: My trace message; -2"));
+ BOOST_CHECK(boost::contains(ss.str(), "Debug: My debug message; -1"));
+ BOOST_CHECK(boost::contains(ss.str(), "Info: My info message; 0"));
+ BOOST_CHECK(boost::contains(ss.str(), "Warning: My warning message; 1"));
+ BOOST_CHECK(boost::contains(ss.str(), "Error: My error message; 2"));
+ BOOST_CHECK(boost::contains(ss.str(), "Fatal: My fatal message; 3"));
+}
+
+BOOST_AUTO_TEST_SUITE_END()
//
#pragma once
-#include <Logging.hpp>
+#include <armnn/Logging.hpp>
#include <armnn/Utils.hpp>
#include <reference/RefWorkloadFactory.hpp>
#include <backendsCommon/test/LayerTests.hpp>
{
// Configures logging for both the ARMNN library and this test program.
armnn::ConfigureLogging(true, true, armnn::LogSeverity::Fatal);
- armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, armnn::LogSeverity::Fatal);
}
// The following macros require the caller to have defined FactoryType, with one of the following using statements:
#include <boost/numeric/conversion/cast.hpp>
#include <boost/assert.hpp>
#include <boost/format.hpp>
-#include <boost/log/trivial.hpp>
// Caffe
#include "caffe/proto/caffe.pb.h"
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#endif
-#include <Logging.hpp>
#include <HeapProfiling.hpp>
#include <boost/format.hpp>
}
catch (const std::exception& e)
{
- BOOST_LOG_TRIVIAL(error) << "An error occurred when splitting tokens: " << e.what();
+ ARMNN_LOG(error) << "An error occurred when splitting tokens: " << e.what();
continue;
}
for (const std::string& token : tokens)
}
catch (const std::exception&)
{
- BOOST_LOG_TRIVIAL(error) << "'" << token << "' is not a valid number. It has been ignored.";
+ ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored.";
}
}
}
}
else
{
- BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
+ ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
return EXIT_FAILURE;
}
&& !defined(ARMNN_ONNX_PARSER) \
&& !defined(ARMNN_TF_PARSER) \
&& !defined(ARMNN_TF_LITE_PARSER))
- BOOST_LOG_TRIVIAL(fatal) << "Not built with any of the supported parsers, Caffe, Onnx, Tensorflow, or TfLite.";
+ ARMNN_LOG(fatal) << "Not built with any of the supported parsers, Caffe, Onnx, Tensorflow, or TfLite.";
return EXIT_FAILURE;
#endif
#if !defined(ARMNN_SERIALIZER)
- BOOST_LOG_TRIVIAL(fatal) << "Not built with Serializer support.";
+ ARMNN_LOG(fatal) << "Not built with Serializer support.";
return EXIT_FAILURE;
#endif
#endif
armnn::ConfigureLogging(true, true, level);
- armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level);
std::string modelFormat;
std::string modelPath;
}
catch (const armnn::InvalidArgumentException& e)
{
- BOOST_LOG_TRIVIAL(fatal) << "Cannot create tensor shape: " << e.what();
+ ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what();
return EXIT_FAILURE;
}
}
#if defined(ARMNN_CAFFE_PARSER)
if (!converter.CreateNetwork<armnnCaffeParser::ICaffeParser>())
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to load model from file";
+ ARMNN_LOG(fatal) << "Failed to load model from file";
return EXIT_FAILURE;
}
#else
- BOOST_LOG_TRIVIAL(fatal) << "Not built with Caffe parser support.";
+ ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
return EXIT_FAILURE;
#endif
}
#if defined(ARMNN_ONNX_PARSER)
if (!converter.CreateNetwork<armnnOnnxParser::IOnnxParser>())
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to load model from file";
+ ARMNN_LOG(fatal) << "Failed to load model from file";
return EXIT_FAILURE;
}
#else
- BOOST_LOG_TRIVIAL(fatal) << "Not built with Onnx parser support.";
+ ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
return EXIT_FAILURE;
#endif
}
#if defined(ARMNN_TF_PARSER)
if (!converter.CreateNetwork<armnnTfParser::ITfParser>())
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to load model from file";
+ ARMNN_LOG(fatal) << "Failed to load model from file";
return EXIT_FAILURE;
}
#else
- BOOST_LOG_TRIVIAL(fatal) << "Not built with Tensorflow parser support.";
+ ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
return EXIT_FAILURE;
#endif
}
#if defined(ARMNN_TF_LITE_PARSER)
if (!isModelBinary)
{
- BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
+ ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
for tflite files";
return EXIT_FAILURE;
}
if (!converter.CreateNetwork<armnnTfLiteParser::ITfLiteParser>())
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to load model from file";
+ ARMNN_LOG(fatal) << "Failed to load model from file";
return EXIT_FAILURE;
}
#else
- BOOST_LOG_TRIVIAL(fatal) << "Not built with TfLite parser support.";
+ ARMNN_LOG(fatal) << "Not built with TfLite parser support.";
return EXIT_FAILURE;
#endif
}
else
{
- BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'";
+ ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'";
return EXIT_FAILURE;
}
if (!converter.Serialize())
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to serialize model";
+ ARMNN_LOG(fatal) << "Failed to serialize model";
return EXIT_FAILURE;
}
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/format.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/format.hpp>
#include <boost/numeric/conversion/cast.hpp>
#include <boost/polymorphic_cast.hpp>
#include <armnn/ArmNN.hpp>
#include <armnn/Exceptions.hpp>
+#include <armnn/Logging.hpp>
#include <armnn/TypesUtils.hpp>
#include <boost/filesystem.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/format.hpp>
-#include <boost/log/trivial.hpp>
-#include <boost/format.hpp>
#include <boost/numeric/conversion/cast.hpp>
#include <fstream>
errorString << "Failed to parse operator #" << operatorIndex
<< " within subgraph #" << subgraphIndex
<< " error: " << e.what();
- BOOST_LOG_TRIVIAL(error) << errorString.str();
+ ARMNN_LOG(error) << errorString.str();
errors << errorString.str() << "\n";
}
+++ /dev/null
-//
-// Copyright © 2017 Arm Ltd. All rights reserved.
-// SPDX-License-Identifier: MIT
-//
-#include "Logging.hpp"
-
-#include <string>
-#include <iostream>
-
-#if defined(_MSC_VER)
-#include <Windows.h>
-#endif
-
-#if defined(__ANDROID__)
-#include <android/log.h>
-#endif
-
-#include <boost/make_shared.hpp>
-#include <boost/log/core.hpp>
-#include <boost/log/sinks.hpp>
-#include <boost/log/sinks/debug_output_backend.hpp>
-#include <boost/log/sinks/basic_sink_backend.hpp>
-#include <boost/log/sinks/text_ostream_backend.hpp>
-#include <boost/log/utility/setup/console.hpp>
-
-namespace armnnUtils
-{
-
-struct DebugOutputSink : boost::log::sinks::basic_formatted_sink_backend<char, boost::log::sinks::concurrent_feeding>
-{
- void consume(boost::log::record_view const& rec, std::string const& formatted_message)
- {
-#if defined(_MSC_VER)
- OutputDebugString(formatted_message.c_str());
- OutputDebugString("\n");
-#endif
-#if defined(__ANDROID__)
- __android_log_write(ANDROID_LOG_DEBUG, "armnn", formatted_message.c_str());
-#endif
- }
-};
-
-void ConfigureLogging(boost::log::core* core, bool printToStandardOutput, bool printToDebugOutput,
- armnn::LogSeverity severity)
-{
- // Even if we remove all the sinks, Boost will fallback to the 'default sink' and still print stuff to
- // stdout, so we have to explicitly disable logging in this case.
- core->set_logging_enabled(printToStandardOutput || printToDebugOutput);
-
- // Sets up severity filter.
- boost::log::trivial::severity_level boostSeverity;
- switch (severity)
- {
- case armnn::LogSeverity::Trace:
- boostSeverity = boost::log::trivial::trace;
- break;
- case armnn::LogSeverity::Debug:
- boostSeverity = boost::log::trivial::debug;
- break;
- case armnn::LogSeverity::Info:
- boostSeverity = boost::log::trivial::info;
- break;
- case armnn::LogSeverity::Warning:
- boostSeverity = boost::log::trivial::warning;
- break;
- case armnn::LogSeverity::Error:
- boostSeverity = boost::log::trivial::error;
- break;
- case armnn::LogSeverity::Fatal:
- boostSeverity = boost::log::trivial::fatal;
- break;
- default:
- BOOST_ASSERT_MSG(false, "Invalid severity");
- }
- core->set_filter(boost::log::trivial::severity >= boostSeverity);
-
- core->remove_all_sinks();
- if (printToStandardOutput)
- {
- typedef boost::log::sinks::basic_text_ostream_backend<char> backend_t;
- boost::shared_ptr<backend_t> backend = boost::make_shared<backend_t>();
-
- boost::shared_ptr<std::basic_ostream<char>> stream(&std::cout, boost::null_deleter());
- backend->add_stream(stream);
-
- typedef boost::log::sinks::synchronous_sink<backend_t> sink_t;
- boost::shared_ptr<sink_t> standardOutputSink = boost::make_shared<sink_t>(backend);
-
- core->add_sink(standardOutputSink);
- }
- if (printToDebugOutput)
- {
- typedef boost::log::sinks::synchronous_sink<DebugOutputSink> sink_t;
- boost::shared_ptr<sink_t> debugOutputSink(new sink_t());
- core->add_sink(debugOutputSink);
- }
-}
-
-}
+++ /dev/null
-//
-// Copyright © 2017 Arm Ltd. All rights reserved.
-// SPDX-License-Identifier: MIT
-//
-#pragma once
-
-
-#include "armnn/Utils.hpp"
-
-#include <boost/log/trivial.hpp>
-
-namespace armnnUtils
-{
-
-// Configures logging for the given Boost Log Core object.
-void ConfigureLogging(boost::log::core* core,
- bool printToStandardOutput,
- bool printToDebugOutput,
- armnn::LogSeverity severity);
-
-}
\ No newline at end of file
//
#include "ModelAccuracyChecker.hpp"
+
+#include <armnn/Logging.hpp>
+
#include <boost/filesystem.hpp>
-#include <boost/log/trivial.hpp>
#include <map>
#include <vector>
{
if (k > 10)
{
- BOOST_LOG_TRIVIAL(warning) << "Accuracy Tool only supports a maximum of Top 10 Accuracy. "
- "Printing Top 10 Accuracy result!";
+ ARMNN_LOG(warning) << "Accuracy Tool only supports a maximum of Top 10 Accuracy. "
+ "Printing Top 10 Accuracy result!";
k = 10;
}
unsigned int total = 0;
#include <boost/filesystem.hpp>
#include <boost/algorithm/string.hpp>
-#include <boost/log/trivial.hpp>
#include <regex>
{
if (!IsPathValid(overrideBackendPath))
{
- BOOST_LOG_TRIVIAL(warning) << "WARNING: The given override path for dynamic backends \""
- << overrideBackendPath << "\" is not valid";
+ ARMNN_LOG(warning) << "WARNING: The given override path for dynamic backends \""
+ << overrideBackendPath << "\" is not valid";
return {};
}
{
if (path.empty())
{
- BOOST_LOG_TRIVIAL(warning) << "WARNING: The given backend path is empty";
+ ARMNN_LOG(warning) << "WARNING: The given backend path is empty";
return false;
}
if (!boost::filesystem::exists(boostPath))
{
- BOOST_LOG_TRIVIAL(warning) << "WARNING: The given backend path \"" << path << "\" does not exist";
+ ARMNN_LOG(warning) << "WARNING: The given backend path \"" << path << "\" does not exist";
return false;
}
if (!boost::filesystem::is_directory(boostPath))
{
- BOOST_LOG_TRIVIAL(warning) << "WARNING: The given backend path \"" << path << "\" is not a directory";
+ ARMNN_LOG(warning) << "WARNING: The given backend path \"" << path << "\" is not a directory";
return false;
}
if (!boostPath.is_absolute())
{
- BOOST_LOG_TRIVIAL(warning) << "WARNING: The given backend path \"" << path << "\" is not absolute";
+ ARMNN_LOG(warning) << "WARNING: The given backend path \"" << path << "\" is not absolute";
return false;
}
}
catch (const filesystem_error& e)
{
- BOOST_LOG_TRIVIAL(warning) << "GetSharedObjects warning: " << e.what();
+ ARMNN_LOG(warning) << "GetSharedObjects warning: " << e.what();
}
if (canonicalPath.empty())
{
}
catch (const std::exception& e)
{
- BOOST_LOG_TRIVIAL(warning) << "GetSharedObjects warning: " << e.what();
+ ARMNN_LOG(warning) << "GetSharedObjects warning: " << e.what();
}
if (!filenameMatch)
{
}
catch (const RuntimeException& e)
{
- BOOST_LOG_TRIVIAL(warning) << "Cannot create a handle to the shared object file \""
- << sharedObject << "\": " << e.what();
+ ARMNN_LOG(warning) << "Cannot create a handle to the shared object file \""
+ << sharedObject << "\": " << e.what();
continue;
}
if (!sharedObjectHandle)
{
- BOOST_LOG_TRIVIAL(warning) << "Invalid handle to the shared object file \"" << sharedObject << "\"";
+ ARMNN_LOG(warning) << "Invalid handle to the shared object file \"" << sharedObject << "\"";
continue;
}
}
catch (const Exception& e)
{
- BOOST_LOG_TRIVIAL(warning) << "Cannot create a valid dynamic backend from the shared object file \""
- << sharedObject << "\": " << e.what();
+ ARMNN_LOG(warning) << "Cannot create a valid dynamic backend from the shared object file \""
+ << sharedObject << "\": " << e.what();
continue;
}
if (!dynamicBackend)
{
- BOOST_LOG_TRIVIAL(warning) << "Invalid dynamic backend object for the shared object file \""
- << sharedObject << "\"";
+ ARMNN_LOG(warning) << "Invalid dynamic backend object for the shared object file \""
+ << sharedObject << "\"";
continue;
}
}
catch (const RuntimeException& e)
{
- BOOST_LOG_TRIVIAL(warning) << "Cannot register dynamic backend, "
- << "an error has occurred when getting the backend id: " << e.what();
+ ARMNN_LOG(warning) << "Cannot register dynamic backend, "
+ << "an error has occurred when getting the backend id: " << e.what();
continue;
}
if (dynamicBackendId.IsEmpty() ||
dynamicBackendId.IsUndefined())
{
- BOOST_LOG_TRIVIAL(warning) << "Cannot register dynamic backend, invalid backend id: " << dynamicBackendId;
+ ARMNN_LOG(warning) << "Cannot register dynamic backend, invalid backend id: " << dynamicBackendId;
continue;
}
bool backendAlreadyRegistered = backendRegistry.IsBackendRegistered(dynamicBackendId);
if (backendAlreadyRegistered)
{
- BOOST_LOG_TRIVIAL(warning) << "Cannot register dynamic backend \"" << dynamicBackendId
- << "\": backend already registered";
+ ARMNN_LOG(warning) << "Cannot register dynamic backend \"" << dynamicBackendId
+ << "\": backend already registered";
continue;
}
}
catch (const RuntimeException& e)
{
- BOOST_LOG_TRIVIAL(warning) << "Cannot register dynamic backend \"" << dynamicBackendId
- << "\": an error has occurred when getting the backend factory function: "
- << e.what();
+ ARMNN_LOG(warning) << "Cannot register dynamic backend \"" << dynamicBackendId
+ << "\": an error has occurred when getting the backend factory function: "
+ << e.what();
continue;
}
if (dynamicBackendFactoryFunction == nullptr)
{
- BOOST_LOG_TRIVIAL(warning) << "Cannot register dynamic backend \"" << dynamicBackendId
- << "\": invalid backend factory function";
+ ARMNN_LOG(warning) << "Cannot register dynamic backend \"" << dynamicBackendId
+ << "\": invalid backend factory function";
continue;
}
}
catch (const InvalidArgumentException& e)
{
- BOOST_LOG_TRIVIAL(warning) << "An error has occurred when registering the dynamic backend \""
- << dynamicBackendId << "\": " << e.what();
+ ARMNN_LOG(warning) << "An error has occurred when registering the dynamic backend \""
+ << dynamicBackendId << "\": " << e.what();
continue;
}
#include <backendsCommon/WorkloadFactory.hpp>
#include <boost/assert.hpp>
-#include <boost/log/trivial.hpp>
namespace armnn
{
//
#include "ClBackendContext.hpp"
+
+#include <armnn/Logging.hpp>
+
#include "ClContextControl.hpp"
#include <arm_compute/core/CL/OpenCL.h>
#include <arm_compute/core/CL/CLKernelLibrary.h>
#include <arm_compute/runtime/CL/CLScheduler.h>
-#include <boost/log/trivial.hpp>
-
namespace armnn
{
}
catch (const cl::Error&)
{
- BOOST_LOG_TRIVIAL(warning) << "WARNING: Runtime::UnloadNetwork(): an error occurred while waiting for "
- "the queued CL requests to finish";
+ ARMNN_LOG(warning) << "Runtime::UnloadNetwork(): an error occurred while waiting for "
+ "the queued CL requests to finish";
return false;
}
}
#include <boost/assert.hpp>
#include <boost/format.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/polymorphic_cast.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/polymorphic_cast.hpp>
#include <boost/format.hpp>
-#include <boost/log/trivial.hpp>
namespace armnn
{
#include "workloads/RefWorkloads.hpp"
#include "RefTensorHandle.hpp"
-#include <boost/log/trivial.hpp>
namespace armnn
{
#include "Activation.hpp"
-#include <boost/log/trivial.hpp>
-
#include <cmath>
namespace armnn
#include <Profiling.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/numeric/conversion/cast.hpp>
using namespace armnn;
}
else
{
- BOOST_LOG_TRIVIAL(warning) << "Illegal NORMALIZATION mode in normalization_f32";
+ ARMNN_LOG(warning) << "Illegal NORMALIZATION mode in normalization_f32";
return;
}
}
else
{
- BOOST_LOG_TRIVIAL(warning) << "Lcr method (Jarret 2009: Local Contrast Normalization) not supported yet.";
+ ARMNN_LOG(warning) << "Lcr method (Jarret 2009: Local Contrast Normalization) not supported yet.";
return;
}
}
#include "CommandHandler.hpp"
#include "ProfilingService.hpp"
-#include <boost/log/trivial.hpp>
+#include <armnn/Logging.hpp>
namespace armnn
{
catch (const Exception& e)
{
// Log the error and continue
- BOOST_LOG_TRIVIAL(warning) << "An error has occurred when handling a command: " << e.what() << std::endl;
+ ARMNN_LOG(warning) << "An error has occurred when handling a command: " << e.what();
// Did we get here because the socket failed?
if ( !profilingConnection.IsOpen() )
{
#include "PeriodicCounterCapture.hpp"
-#include <boost/log/trivial.hpp>
+#include <armnn/Logging.hpp>
+
#include <iostream>
namespace armnn
catch (const Exception& e)
{
// Report the error and continue
- BOOST_LOG_TRIVIAL(warning) << "An error has occurred when getting a counter value: "
- << e.what() << std::endl;
+ ARMNN_LOG(warning) << "An error has occurred when getting a counter value: "
+ << e.what();
continue;
}
values.emplace_back(std::make_pair(requestedId, counterValue));
#include "ProfilingService.hpp"
-#include <boost/log/trivial.hpp>
+#include <armnn/Logging.hpp>
+
#include <boost/format.hpp>
namespace armnn
}
catch (const Exception& e)
{
- BOOST_LOG_TRIVIAL(warning) << "An error has occurred when creating the profiling connection: "
- << e.what() << std::endl;
+ ARMNN_LOG(warning) << "An error has occurred when creating the profiling connection: "
+ << e.what();
}
// Move to the next state
#include "SendCounterPacketTests.hpp"
+#include <armnn/Logging.hpp>
+
#include <CommandHandlerFunctor.hpp>
#include <IProfilingConnection.hpp>
-#include <Logging.hpp>
#include <ProfilingService.hpp>
#include <boost/polymorphic_cast.hpp>
#include "CaffePreprocessor.hpp"
#include <boost/numeric/conversion/cast.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/assert.hpp>
#include <boost/format.hpp>
//
#include "Cifar10Database.hpp"
+#include <armnn/Logging.hpp>
+
#include <boost/numeric/conversion/cast.hpp>
-#include <boost/log/trivial.hpp>
#include <fstream>
#include <vector>
std::ifstream fileStream(fullpath, std::ios::binary);
if (!fileStream.is_open())
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to load " << fullpath;
+ ARMNN_LOG(fatal) << "Failed to load " << fullpath;
return nullptr;
}
if (!fileStream.good())
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to read " << fullpath;
+ ARMNN_LOG(fatal) << "Failed to read " << fullpath;
return nullptr;
}
#include <armnn/TypesUtils.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/numeric/conversion/cast.hpp>
#include <array>
}
catch (const std::exception& e)
{
- BOOST_LOG_TRIVIAL(error) << "An error occurred when splitting tokens: " << e.what();
+ ARMNN_LOG(error) << "An error occurred when splitting tokens: " << e.what();
continue;
}
for (const std::string& token : tokens)
}
catch (const std::exception&)
{
- BOOST_LOG_TRIVIAL(error) << "'" << token << "' is not a valid number. It has been ignored.";
+ ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored.";
}
}
}
}
catch (const InferenceTestImageException& e)
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what();
+ ARMNN_LOG(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what();
return nullptr;
}
}
catch (const InferenceTestImageException& e)
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what();
+ ARMNN_LOG(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what();
return nullptr;
}
#include "DeepSpeechV1Database.hpp"
#include <boost/assert.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/numeric/conversion/cast.hpp>
#include <boost/test/tools/floating_point_comparison.hpp>
{
if(!m_FloatComparer(output1[j], m_ExpectedOutputs.m_InputSeq[j]))
{
- BOOST_LOG_TRIVIAL(error) << "InputSeq for Lstm " << this->GetTestCaseId() <<
+ ARMNN_LOG(error) << "InputSeq for Lstm " << this->GetTestCaseId() <<
" is incorrect at" << j;
return TestCaseResult::Failed;
}
{
if(!m_FloatComparer(output2[j], m_ExpectedOutputs.m_StateH[j]))
{
- BOOST_LOG_TRIVIAL(error) << "StateH for Lstm " << this->GetTestCaseId() <<
+ ARMNN_LOG(error) << "StateH for Lstm " << this->GetTestCaseId() <<
" is incorrect";
return TestCaseResult::Failed;
}
{
if(!m_FloatComparer(output3[j], m_ExpectedOutputs.m_StateC[j]))
{
- BOOST_LOG_TRIVIAL(error) << "StateC for Lstm " << this->GetTestCaseId() <<
+ ARMNN_LOG(error) << "StateC for Lstm " << this->GetTestCaseId() <<
" is incorrect";
return TestCaseResult::Failed;
}
armnn::LogSeverity level = armnn::LogSeverity::Debug;
#endif
armnn::ConfigureLogging(true, true, level);
- armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level);
std::string testCasesFile;
// and that desc.add_options() can throw boost::io::too_few_args.
// They really won't in any of these cases.
BOOST_ASSERT_MSG(false, "Caught unexpected exception");
- BOOST_LOG_TRIVIAL(fatal) << "Fatal internal error: " << e.what();
+ ARMNN_LOG(fatal) << "Fatal internal error: " << e.what();
return EXIT_FAILURE;
}
// Check that the file exists.
if (!boost::filesystem::exists(testCasesFile))
{
- BOOST_LOG_TRIVIAL(fatal) << "Given file \"" << testCasesFile << "\" does not exist";
+ ARMNN_LOG(fatal) << "Given file \"" << testCasesFile << "\" does not exist";
return EXIT_FAILURE;
}
// Check that there is at least one test case to run
if (testCases.empty())
{
- BOOST_LOG_TRIVIAL(fatal) << "Given file \"" << testCasesFile << "\" has no test cases";
+ ARMNN_LOG(fatal) << "Given file \"" << testCasesFile << "\" has no test cases";
return EXIT_FAILURE;
}
#include "ImageTensorGenerator.hpp"
#include "../InferenceTestImage.hpp"
+#include <armnn/Logging.hpp>
#include <armnn/TypesUtils.hpp>
#include <boost/filesystem.hpp>
#include <boost/filesystem/operations.hpp>
#include <boost/filesystem/path.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/program_options.hpp>
#include <boost/variant.hpp>
}
catch (const InferenceTestImageException& e)
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to load image file " << imagePath << " with error: " << e.what();
+ ARMNN_LOG(fatal) << "Failed to load image file " << imagePath << " with error: " << e.what();
return -1;
}
imageDataContainers[0]);
if (!imageTensorFile)
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to write to output file" << outputPath;
+ ARMNN_LOG(fatal) << "Failed to write to output file" << outputPath;
imageTensorFile.close();
return -1;
}
}
else
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to open output file" << outputPath;
+ ARMNN_LOG(fatal) << "Failed to open output file" << outputPath;
return -1;
}
#include <boost/algorithm/string/join.hpp>
#include <boost/exception/exception.hpp>
#include <boost/exception/diagnostic_information.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/format.hpp>
#include <boost/program_options.hpp>
#include <boost/filesystem.hpp>
#include "../src/armnn/Profiling.hpp"
#include <boost/algorithm/string.hpp>
#include <boost/numeric/conversion/cast.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/filesystem/path.hpp>
#include <boost/assert.hpp>
#include <boost/format.hpp>
#if !defined (NDEBUG)
if (params.m_IterationCount > 0) // If just running a few select images then don't bother to warn.
{
- BOOST_LOG_TRIVIAL(warning) << "Performance test running in DEBUG build - results may be inaccurate.";
+ ARMNN_LOG(warning) << "Performance test running in DEBUG build - results may be inaccurate.";
}
#endif
inferenceTimesFile.open(params.m_InferenceTimesFile.c_str(), ios_base::trunc | ios_base::out);
if (!inferenceTimesFile.good())
{
- BOOST_LOG_TRIVIAL(error) << "Failed to open inference times file for writing: "
+ ARMNN_LOG(error) << "Failed to open inference times file for writing: "
<< params.m_InferenceTimesFile;
return false;
}
std::unique_ptr<IInferenceTestCase> warmupTestCase = testCaseProvider.GetTestCase(0);
if (warmupTestCase == nullptr)
{
- BOOST_LOG_TRIVIAL(error) << "Failed to load test case";
+ ARMNN_LOG(error) << "Failed to load test case";
return false;
}
}
catch (const TestFrameworkException& testError)
{
- BOOST_LOG_TRIVIAL(error) << testError.what();
+ ARMNN_LOG(error) << testError.what();
return false;
}
if (testCase == nullptr)
{
- BOOST_LOG_TRIVIAL(error) << "Failed to load test case";
+ ARMNN_LOG(error) << "Failed to load test case";
return false;
}
}
catch (const TestFrameworkException& testError)
{
- BOOST_LOG_TRIVIAL(error) << testError.what();
+ ARMNN_LOG(error) << testError.what();
result = TestCaseResult::Abort;
}
const double averageTimePerTestCaseMs = totalTime / nbProcessed * 1000.0f;
- BOOST_LOG_TRIVIAL(info) << std::fixed << std::setprecision(3) <<
+ ARMNN_LOG(info) << std::fixed << std::setprecision(3) <<
"Total time for " << nbProcessed << " test cases: " << totalTime << " seconds";
- BOOST_LOG_TRIVIAL(info) << std::fixed << std::setprecision(3) <<
+ ARMNN_LOG(info) << std::fixed << std::setprecision(3) <<
"Average time per test case: " << averageTimePerTestCaseMs << " ms";
// if profiling is enabled print out the results
if (!success)
{
- BOOST_LOG_TRIVIAL(error) << "One or more test cases failed";
+ ARMNN_LOG(error) << "One or more test cases failed";
return false;
}
#pragma once
#include <armnn/ArmNN.hpp>
+#include <armnn/Logging.hpp>
#include <armnn/TypesUtils.hpp>
#include "InferenceModel.hpp"
-#include <Logging.hpp>
-#include <boost/log/core/core.hpp>
#include <boost/program_options.hpp>
#include <boost/algorithm/string.hpp>
#include <boost/numeric/conversion/cast.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/filesystem/path.hpp>
#include <boost/assert.hpp>
#include <boost/format.hpp>
ClassifierResultProcessor resultProcessor(m_QuantizationParams.first, m_QuantizationParams.second);
boost::apply_visitor(resultProcessor, output);
- BOOST_LOG_TRIVIAL(info) << "= Prediction values for test #" << testCaseId;
+ ARMNN_LOG(info) << "= Prediction values for test #" << testCaseId;
auto it = resultProcessor.GetResultMap().rbegin();
for (int i=0; i<5 && it != resultProcessor.GetResultMap().rend(); ++i)
{
- BOOST_LOG_TRIVIAL(info) << "Top(" << (i+1) << ") prediction is " << it->second <<
+ ARMNN_LOG(info) << "Top(" << (i+1) << ") prediction is " << it->second <<
" with value: " << (it->first);
++it;
}
// If we're just running the defaultTestCaseIds, each one must be classified correctly.
if (params.m_IterationCount == 0 && prediction != m_Label)
{
- BOOST_LOG_TRIVIAL(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
+ ARMNN_LOG(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
" is incorrect (should be " << m_Label << ")";
return TestCaseResult::Failed;
}
// If a validation file was provided as input, it checks that the prediction matches.
if (!m_ValidationPredictions.empty() && prediction != m_ValidationPredictions[testCaseId])
{
- BOOST_LOG_TRIVIAL(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
+ ARMNN_LOG(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
" doesn't match the prediction in the validation file (" << m_ValidationPredictions[testCaseId] << ")";
return TestCaseResult::Failed;
}
{
const double accuracy = boost::numeric_cast<double>(m_NumCorrectInferences) /
boost::numeric_cast<double>(m_NumInferences);
- BOOST_LOG_TRIVIAL(info) << std::fixed << std::setprecision(3) << "Overall accuracy: " << accuracy;
+ ARMNN_LOG(info) << std::fixed << std::setprecision(3) << "Overall accuracy: " << accuracy;
// If a validation file was requested as output, the predictions are saved to it.
if (!m_ValidationFileOut.empty())
}
else
{
- BOOST_LOG_TRIVIAL(error) << "Failed to open output validation file: " << m_ValidationFileOut;
+ ARMNN_LOG(error) << "Failed to open output validation file: " << m_ValidationFileOut;
return false;
}
}
armnn::LogSeverity level = armnn::LogSeverity::Debug;
#endif
armnn::ConfigureLogging(true, true, level);
- armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level);
try
{
}
catch (armnn::Exception const& e)
{
- BOOST_LOG_TRIVIAL(fatal) << "Armnn Error: " << e.what();
+ ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
return 1;
}
}
//
#include "MnistDatabase.hpp"
+#include <armnn/Logging.hpp>
+
#include <boost/numeric/conversion/cast.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/assert.hpp>
#include <fstream>
#include <vector>
if (!imageStream.is_open())
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to load " << imagePath;
+ ARMNN_LOG(fatal) << "Failed to load " << imagePath;
return nullptr;
}
if (!labelStream.is_open())
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to load " << imagePath;
+ ARMNN_LOG(fatal) << "Failed to load " << imagePath;
return nullptr;
}
imageStream.read(reinterpret_cast<char*>(&magic), sizeof(magic));
if (magic != 0x03080000)
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to read " << imagePath;
+ ARMNN_LOG(fatal) << "Failed to read " << imagePath;
return nullptr;
}
labelStream.read(reinterpret_cast<char*>(&magic), sizeof(magic));
if (magic != 0x01080000)
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to read " << labelPath;
+ ARMNN_LOG(fatal) << "Failed to read " << labelPath;
return nullptr;
}
if (!imageStream.good())
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to read " << imagePath;
+ ARMNN_LOG(fatal) << "Failed to read " << imagePath;
return nullptr;
}
if (!labelStream.good())
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to read " << labelPath;
+ ARMNN_LOG(fatal) << "Failed to read " << labelPath;
return nullptr;
}
#include <armnn/TypesUtils.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/numeric/conversion/cast.hpp>
#include <array>
}
catch (const InferenceTestImageException& e)
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what();
+ ARMNN_LOG(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what();
return nullptr;
}
#include "MobileNetSsdDatabase.hpp"
#include <boost/assert.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/numeric/conversion/cast.hpp>
#include <boost/test/tools/floating_point_comparison.hpp>
const size_t expectedNumDetections = m_DetectedObjects.size();
if (numDetections != expectedNumDetections)
{
- BOOST_LOG_TRIVIAL(error) << "Number of detections is incorrect: Expected (" <<
+ ARMNN_LOG(error) << "Number of detections is incorrect: Expected (" <<
expectedNumDetections << ")" << " but got (" << numDetections << ")";
return TestCaseResult::Failed;
}
{
if (it == detectedObjects.end())
{
- BOOST_LOG_TRIVIAL(error) << "No more detected objects found! Index out of bounds: " << i;
+ ARMNN_LOG(error) << "No more detected objects found! Index out of bounds: " << i;
return TestCaseResult::Abort;
}
if (detectedObject.m_Class != expectedObject.m_Class)
{
- BOOST_LOG_TRIVIAL(error) << "Prediction for test case " << this->GetTestCaseId() <<
+ ARMNN_LOG(error) << "Prediction for test case " << this->GetTestCaseId() <<
" is incorrect: Expected (" << expectedObject.m_Class << ")" <<
" but predicted (" << detectedObject.m_Class << ")";
return TestCaseResult::Failed;
if(!m_FloatComparer(detectedObject.m_Confidence, expectedObject.m_Confidence))
{
- BOOST_LOG_TRIVIAL(error) << "Confidence of prediction for test case " << this->GetTestCaseId() <<
+ ARMNN_LOG(error) << "Confidence of prediction for test case " << this->GetTestCaseId() <<
" is incorrect: Expected (" << expectedObject.m_Confidence << ") +- 1.0 pc" <<
" but predicted (" << detectedObject.m_Confidence << ")";
return TestCaseResult::Failed;
!m_FloatComparer(detectedObject.m_BoundingBox.m_XMax, expectedObject.m_BoundingBox.m_XMax) ||
!m_FloatComparer(detectedObject.m_BoundingBox.m_YMax, expectedObject.m_BoundingBox.m_YMax))
{
- BOOST_LOG_TRIVIAL(error) << "Detected bounding box for test case " << this->GetTestCaseId() <<
+ ARMNN_LOG(error) << "Detected bounding box for test case " << this->GetTestCaseId() <<
" is incorrect";
return TestCaseResult::Failed;
}
using namespace boost::filesystem;
armnn::LogSeverity level = armnn::LogSeverity::Debug;
armnn::ConfigureLogging(true, true, level);
- armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level);
// Set-up program Options
namespace po = boost::program_options;
std::string invalidBackends;
if (!CheckRequestedBackendsAreValid(computeDevice, armnn::Optional<std::string&>(invalidBackends)))
{
- BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
- << invalidBackends;
+ ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
+ << invalidBackends;
return EXIT_FAILURE;
}
armnn::Status status;
{
std::stringstream message;
message << "armnn::Exception (" << e.what() << ") caught from optimize.";
- BOOST_LOG_TRIVIAL(fatal) << message.str();
+ ARMNN_LOG(fatal) << message.str();
return 1;
}
status = runtime->LoadNetwork(networkId, std::move(optimizedNet));
if (status == armnn::Status::Failure)
{
- BOOST_LOG_TRIVIAL(fatal) << "armnn::IRuntime: Failed to load network";
+ ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to load network";
return 1;
}
if (modelOutputLabelsPath.empty() || !boost::filesystem::exists(modelOutputLabelsPath) ||
!boost::filesystem::is_regular_file(modelOutputLabelsPath))
{
- BOOST_LOG_TRIVIAL(fatal) << "Invalid model output labels path at " << modelOutputLabelsPath;
+ ARMNN_LOG(fatal) << "Invalid model output labels path at " << modelOutputLabelsPath;
}
const std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels =
LoadModelOutputLabels(modelOutputLabelsPath);
size_t imageEndIndex;
if (imageIndexStrs.size() != 2)
{
- BOOST_LOG_TRIVIAL(fatal) << "Invalid validation range specification: Invalid format " << validationRange;
+ ARMNN_LOG(fatal) << "Invalid validation range specification: Invalid format " << validationRange;
return 1;
}
try
}
catch (const std::exception& e)
{
- BOOST_LOG_TRIVIAL(fatal) << "Invalid validation range specification: " << validationRange;
+ ARMNN_LOG(fatal) << "Invalid validation range specification: " << validationRange;
return 1;
}
if (!blacklistPath.empty() &&
!(boost::filesystem::exists(blacklistPath) && boost::filesystem::is_regular_file(blacklistPath)))
{
- BOOST_LOG_TRIVIAL(fatal) << "Invalid path to blacklist file at " << blacklistPath;
+ ARMNN_LOG(fatal) << "Invalid path to blacklist file at " << blacklistPath;
return 1;
}
}
else
{
- BOOST_LOG_TRIVIAL(fatal) << "Invalid Data layout: " << inputLayout;
+ ARMNN_LOG(fatal) << "Invalid Data layout: " << inputLayout;
return 1;
}
const unsigned int inputTensorWidth =
// Check output tensor shape is valid
if (modelOutputLabels.size() != outputNumElements)
{
- BOOST_LOG_TRIVIAL(fatal) << "Number of output elements: " << outputNumElements
+ ARMNN_LOG(fatal) << "Number of output elements: " << outputNumElements
<< " , mismatches the number of output labels: " << modelOutputLabels.size();
return 1;
}
}
else
{
- BOOST_LOG_TRIVIAL(fatal) << "Unsupported frontend: " << modelFormat;
+ ARMNN_LOG(fatal) << "Unsupported frontend: " << modelFormat;
return 1;
}
const NormalizationParameters& normParams = GetNormalizationParameters(modelFrontend, inputTensorDataType);
if (status == armnn::Status::Failure)
{
- BOOST_LOG_TRIVIAL(fatal) << "armnn::IRuntime: Failed to enqueue workload for image: " << imageName;
+ ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to enqueue workload for image: " << imageName;
}
checker.AddImageResult<TContainer>(imageName, outputDataContainers);
std::cout << "Top " << i << " Accuracy: " << checker.GetAccuracy(i) << "%" << "\n";
}
- BOOST_LOG_TRIVIAL(info) << "Accuracy Tool ran successfully!";
+ ARMNN_LOG(info) << "Accuracy Tool ran successfully!";
return 0;
}
catch (armnn::Exception const & e)
#include <chrono>
#include <vector>
#include <array>
-#include <boost/log/trivial.hpp>
#include "armnn/ArmNN.hpp"
#include "armnn/Utils.hpp"
{
// Configures logging for both the ARMNN library and this test program.
armnn::ConfigureLogging(true, true, level);
- armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level);
-
namespace po = boost::program_options;
std::vector<armnn::BackendId> computeDevice;
std::string invalidBackends;
if (!CheckRequestedBackendsAreValid(computeDevice, armnn::Optional<std::string&>(invalidBackends)))
{
- BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
- << invalidBackends;
+ ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
+ << invalidBackends;
return EXIT_FAILURE;
}
{
std::stringstream message;
message << "armnn::Exception ("<<e.what()<<") caught from optimize.";
- BOOST_LOG_TRIVIAL(fatal) << message.str();
+ ARMNN_LOG(fatal) << message.str();
return 1;
}
status = runtime->LoadNetwork(networkId, std::move(optimizedNet));
if (status == armnn::Status::Failure)
{
- BOOST_LOG_TRIVIAL(fatal) << "armnn::IRuntime: Failed to load network";
+ ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to load network";
return 1;
}
armnnUtils::MakeOutputTensors(outputBindings, outputDataContainers));
if (status == armnn::Status::Failure)
{
- BOOST_LOG_TRIVIAL(fatal) << "armnn::IRuntime: Failed to enqueue workload";
+ ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to enqueue workload";
return 1;
}
}
if (!std::equal(output0.begin(), output0.end(), outputK.begin(), outputK.end()))
{
- BOOST_LOG_TRIVIAL(error) << "Multiple networks inference failed!";
+ ARMNN_LOG(error) << "Multiple networks inference failed!";
return 1;
}
}
}
- BOOST_LOG_TRIVIAL(info) << "Multiple networks inference ran successfully!";
+ ARMNN_LOG(info) << "Multiple networks inference ran successfully!";
return 0;
}
catch (armnn::Exception const& e)
#include "CsvReader.hpp"
#include "../InferenceTest.hpp"
-#include <Logging.hpp>
#include <Profiling.hpp>
#include <ResolveType.hpp>
}
catch (const std::exception& e)
{
- BOOST_LOG_TRIVIAL(error) << "An error occurred when splitting tokens: " << e.what();
+ ARMNN_LOG(error) << "An error occurred when splitting tokens: " << e.what();
continue;
}
for (const std::string& token : tokens)
}
catch (const std::exception&)
{
- BOOST_LOG_TRIVIAL(error) << "'" << token << "' is not a valid number. It has been ignored.";
+ ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored.";
}
}
}
}
else
{
- BOOST_LOG_TRIVIAL(info) << "Output Tensor File: " << m_OutputTensorFile << " could not be opened!";
+ ARMNN_LOG(info) << "Output Tensor File: " << m_OutputTensorFile << " could not be opened!";
}
outputTensorFile.close();
}
else
{
std::string errorMessage = "Unsupported tensor data type " + dataTypeStr;
- BOOST_LOG_TRIVIAL(fatal) << errorMessage;
+ ARMNN_LOG(fatal) << errorMessage;
inputTensorFile.close();
throw armnn::Exception(errorMessage);
}
else
{
- BOOST_LOG_TRIVIAL(fatal) << "Unsupported tensor data type \"" << params.m_OutputTypes[i] << "\". ";
+ ARMNN_LOG(fatal) << "Unsupported tensor data type \"" << params.m_OutputTypes[i] << "\". ";
return EXIT_FAILURE;
}
}
if (params.m_GenerateTensorData)
{
- BOOST_LOG_TRIVIAL(warning) << "The input data was generated, note that the output will not be useful";
+ ARMNN_LOG(warning) << "The input data was generated, note that the output will not be useful";
}
// Print output tensors
boost::apply_visitor(printer, outputDataContainers[i]);
}
- BOOST_LOG_TRIVIAL(info) << "\nInference time: " << std::setprecision(2)
+ ARMNN_LOG(info) << "\nInference time: " << std::setprecision(2)
<< std::fixed << inference_duration.count() << " ms";
// If thresholdTime == 0.0 (default), then it hasn't been supplied at command line
if (params.m_ThresholdTime != 0.0)
{
- BOOST_LOG_TRIVIAL(info) << "Threshold time: " << std::setprecision(2)
+ ARMNN_LOG(info) << "Threshold time: " << std::setprecision(2)
<< std::fixed << params.m_ThresholdTime << " ms";
auto thresholdMinusInference = params.m_ThresholdTime - inference_duration.count();
- BOOST_LOG_TRIVIAL(info) << "Threshold time - Inference time: " << std::setprecision(2)
+ ARMNN_LOG(info) << "Threshold time - Inference time: " << std::setprecision(2)
<< std::fixed << thresholdMinusInference << " ms" << "\n";
if (thresholdMinusInference < 0)
{
- BOOST_LOG_TRIVIAL(fatal) << "Elapsed inference time is greater than provided threshold time.\n";
+ ARMNN_LOG(fatal) << "Elapsed inference time is greater than provided threshold time.\n";
return EXIT_FAILURE;
}
}
}
catch (armnn::Exception const& e)
{
- BOOST_LOG_TRIVIAL(fatal) << "Armnn Error: " << e.what();
+ ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
return EXIT_FAILURE;
}
}
else
{
- BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
+ ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
return EXIT_FAILURE;
}
if ((inputTensorShapesVector.size() != 0) && (inputTensorShapesVector.size() != inputNamesVector.size()))
{
- BOOST_LOG_TRIVIAL(fatal) << "input-name and input-tensor-shape must have the same amount of elements.";
+ ARMNN_LOG(fatal) << "input-name and input-tensor-shape must have the same amount of elements.";
return EXIT_FAILURE;
}
if ((inputTensorDataFilePathsVector.size() != 0) &&
(inputTensorDataFilePathsVector.size() != inputNamesVector.size()))
{
- BOOST_LOG_TRIVIAL(fatal) << "input-name and input-tensor-data must have the same amount of elements.";
+ ARMNN_LOG(fatal) << "input-name and input-tensor-data must have the same amount of elements.";
return EXIT_FAILURE;
}
if ((outputTensorFilesVector.size() != 0) &&
(outputTensorFilesVector.size() != outputNamesVector.size()))
{
- BOOST_LOG_TRIVIAL(fatal) << "output-name and write-outputs-to-file must have the same amount of elements.";
+ ARMNN_LOG(fatal) << "output-name and write-outputs-to-file must have the same amount of elements.";
return EXIT_FAILURE;
}
}
else if ((inputTypesVector.size() != 0) && (inputTypesVector.size() != inputNamesVector.size()))
{
- BOOST_LOG_TRIVIAL(fatal) << "input-name and input-type must have the same amount of elements.";
+ ARMNN_LOG(fatal) << "input-name and input-type must have the same amount of elements.";
return EXIT_FAILURE;
}
}
else if ((outputTypesVector.size() != 0) && (outputTypesVector.size() != outputNamesVector.size()))
{
- BOOST_LOG_TRIVIAL(fatal) << "output-name and output-type must have the same amount of elements.";
+ ARMNN_LOG(fatal) << "output-name and output-type must have the same amount of elements.";
return EXIT_FAILURE;
}
}
catch (const armnn::InvalidArgumentException& e)
{
- BOOST_LOG_TRIVIAL(fatal) << "Cannot create tensor shape: " << e.what();
+ ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what();
return EXIT_FAILURE;
}
}
// Check that threshold time is not less than zero
if (thresholdTime < 0)
{
- BOOST_LOG_TRIVIAL(fatal) << "Threshold time supplied as a command line argument is less than zero.";
+ ARMNN_LOG(fatal) << "Threshold time supplied as a command line argument is less than zero.";
return EXIT_FAILURE;
}
// Warn if ExecuteNetwork will generate dummy input data
if (params.m_GenerateTensorData)
{
- BOOST_LOG_TRIVIAL(warning) << "No input files provided, input tensors will be filled with 0s.";
+ ARMNN_LOG(warning) << "No input files provided, input tensors will be filled with 0s.";
}
// Forward to implementation based on the parser type
#if defined(ARMNN_SERIALIZER)
return MainImpl<armnnDeserializer::IDeserializer, float>(params, runtime);
#else
- BOOST_LOG_TRIVIAL(fatal) << "Not built with serialization support.";
+ ARMNN_LOG(fatal) << "Not built with serialization support.";
return EXIT_FAILURE;
#endif
}
#if defined(ARMNN_CAFFE_PARSER)
return MainImpl<armnnCaffeParser::ICaffeParser, float>(params, runtime);
#else
- BOOST_LOG_TRIVIAL(fatal) << "Not built with Caffe parser support.";
+ ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
return EXIT_FAILURE;
#endif
}
#if defined(ARMNN_ONNX_PARSER)
return MainImpl<armnnOnnxParser::IOnnxParser, float>(params, runtime);
#else
- BOOST_LOG_TRIVIAL(fatal) << "Not built with Onnx parser support.";
+ ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
return EXIT_FAILURE;
#endif
}
#if defined(ARMNN_TF_PARSER)
return MainImpl<armnnTfParser::ITfParser, float>(params, runtime);
#else
- BOOST_LOG_TRIVIAL(fatal) << "Not built with Tensorflow parser support.";
+ ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
return EXIT_FAILURE;
#endif
}
#if defined(ARMNN_TF_LITE_PARSER)
if (! isModelBinary)
{
- BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
+ ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
for tflite files";
return EXIT_FAILURE;
}
return MainImpl<armnnTfLiteParser::ITfLiteParser, float>(params, runtime);
#else
- BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat <<
+ ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat <<
"'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
return EXIT_FAILURE;
#endif
}
else
{
- BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat <<
+ ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat <<
"'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
return EXIT_FAILURE;
}
// and that desc.add_options() can throw boost::io::too_few_args.
// They really won't in any of these cases.
BOOST_ASSERT_MSG(false, "Caught unexpected exception");
- BOOST_LOG_TRIVIAL(fatal) << "Fatal internal error: " << e.what();
+ ARMNN_LOG(fatal) << "Fatal internal error: " << e.what();
return EXIT_FAILURE;
}
std::string invalidBackends;
if (!CheckRequestedBackendsAreValid(computeDevices, armnn::Optional<std::string&>(invalidBackends)))
{
- BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
+ ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
<< invalidBackends;
return EXIT_FAILURE;
}
#include "YoloDatabase.hpp"
#include <armnn/Exceptions.hpp>
+#include <armnn/Logging.hpp>
#include <array>
#include <cstdint>
#include <boost/assert.hpp>
#include <boost/format.hpp>
-#include <boost/log/trivial.hpp>
#include <boost/numeric/conversion/cast.hpp>
#include "InferenceTestImage.hpp"
}
catch (const InferenceTestImageException& e)
{
- BOOST_LOG_TRIVIAL(fatal) << "Failed to load test case " << testCaseId << " with error: " << e.what();
+ ARMNN_LOG(fatal) << "Failed to load test case " << testCaseId << " with error: " << e.what();
return nullptr;
}
const YoloDetectedObject& detectedObject = *outputIt;
if (detectedObject.m_Class != expectedDetection.m_Class)
{
- BOOST_LOG_TRIVIAL(error) << "Prediction for test case " << this->GetTestCaseId() <<
+ ARMNN_LOG(error) << "Prediction for test case " << this->GetTestCaseId() <<
" is incorrect: Expected (" << expectedDetection.m_Class << ")" <<
" but predicted (" << detectedObject.m_Class << ")";
return TestCaseResult::Failed;
!m_FloatComparer(detectedObject.m_Box.m_H, expectedDetection.m_Box.m_H) ||
!m_FloatComparer(detectedObject.m_Confidence, expectedDetection.m_Confidence))
{
- BOOST_LOG_TRIVIAL(error) << "Detected bounding box for test case " << this->GetTestCaseId() <<
+ ARMNN_LOG(error) << "Detected bounding box for test case " << this->GetTestCaseId() <<
" is incorrect";
return TestCaseResult::Failed;
}
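
For reference, the call sites converted above all rely on the same two entry points. Below is a minimal usage sketch, not part of this patch: it assumes armnn::ConfigureLogging keeps its existing signature from armnn/Utils.hpp, and that the lowercase severity tokens accepted by ARMNN_LOG (trace, debug, info, warning, error, fatal) map onto armnn::LogSeverity as used throughout the diff.

// Illustrative sketch only; not part of this change.
// Assumption: ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity)
// is available from armnn/Utils.hpp, and ARMNN_LOG(severity) comes from armnn/Logging.hpp.
#include <armnn/Logging.hpp>
#include <armnn/Utils.hpp>

int main()
{
    // Send log records to standard output, skip the platform debug stream,
    // and drop anything below Info severity.
    armnn::ConfigureLogging(true, false, armnn::LogSeverity::Info);

    ARMNN_LOG(info)    << "Inference time: " << 12.5 << " ms";
    ARMNN_LOG(warning) << "No input files provided; input tensors will be filled with 0s.";
    ARMNN_LOG(fatal)   << "Unknown model format: 'foo'. Please include 'binary' or 'text'";

    return 0;
}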