+++ /dev/null
-/*
- * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @file NNAPIConvert.h
- * @brief This file contains convereter(s)\n
- * from NNAPI frontend's struct to neurun's internal struct
- */
-#ifndef __NEURUN_UTIL_NNAPI_CONVERT_H__
-#define __NEURUN_UTIL_NNAPI_CONVERT_H__
-
-#include <NeuralNetworks.h>
-
-#include "model/operand/TypeInfo.h"
-#include "model/operand/Shape.h"
-
-namespace neurun
-{
-namespace util
-{
-
-/**
- * @brief Convert data type from NNAPI to internal data type
- * @param[in] type NNAPI's data type
- * @return neurun's internal data type
- * @note Now neurun::model::operand::DataType shares the same enum value\n
- with OperandCode in NeuralNetworks.h.\n
- If we don't share same value, we must fix this mapping function.
- */
-model::operand::DataType getDataType(OperandCode type);
-
-/**
- * @brief Convert operand type info from NNAPI to interanl operand type info
- * @param[in] type NNAPI's operand type
- * @return neurun's internal operand type info
- */
-model::operand::TypeInfo getTypeInfo(const ANeuralNetworksOperandType *type);
-
-/**
- * @brief Convert operand shape info from NNAPI to internal operand shape
- * @param[in] type NNAPI's operand type
- * @return neurun's internal operand shape
- */
-model::operand::Shape getShape(const ANeuralNetworksOperandType *type);
-
-} // namespace neurun
-} // namespace util
-
-#endif // __NEURUN_UTIL_NNAPI_CONVERT_H__
* limitations under the License.
*/
-#include "util/NNAPIConvert.h"
+#include "NNAPIConvert.h"
-namespace neurun
-{
-namespace util
-{
+using namespace ::neurun::model;
-model::operand::DataType getDataType(OperandCode type)
+operand::DataType NNAPIConvert::getDataType(OperandCode type)
{
// Now neurun::model::operand::DataType shares the same enum value with OperandCode
// in NeuralNetworks.h.
// If we don't share same value, we must fix this mapping function.
- return static_cast<model::operand::DataType>(static_cast<uint32_t>(type));
+ return static_cast<operand::DataType>(static_cast<uint32_t>(type));
}
-model::operand::TypeInfo getTypeInfo(const ANeuralNetworksOperandType *type)
+operand::TypeInfo NNAPIConvert::getTypeInfo(const ANeuralNetworksOperandType *type)
{
- return model::operand::TypeInfo(getDataType((OperandCode)(type->type)), type->scale,
- type->zeroPoint);
+ return operand::TypeInfo(getDataType((OperandCode)(type->type)), type->scale, type->zeroPoint);
}
-model::operand::Shape getShape(const ANeuralNetworksOperandType *type)
+operand::Shape NNAPIConvert::getShape(const ANeuralNetworksOperandType *type)
{
- model::operand::Shape shape(type->dimensionCount);
+ operand::Shape shape(type->dimensionCount);
for (uint32_t axis = 0; axis < type->dimensionCount; ++axis)
{
return shape;
}
-
-} // namespace util
-} // namespace neurun
--- /dev/null
+/*
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @file NNAPIConvert.h
+ * @brief This file contains converter(s)\n
+ * from NNAPI frontend's struct to neurun's internal struct
+ */
+#ifndef __NEURUN_NNAPI_CONVERT_H__
+#define __NEURUN_NNAPI_CONVERT_H__
+
+#include <NeuralNetworks.h>
+
+#include <model/operand/TypeInfo.h>
+#include <model/operand/Shape.h>
+
+class NNAPIConvert
+{
+
+public:
+ /**
+ * @brief Convert data type from NNAPI to internal data type
+ * @param[in] type NNAPI's data type
+ * @return neurun's internal data type
+ * @note Now neurun::model::operand::DataType shares the same enum value\n
+ with OperandCode in NeuralNetworks.h.\n
+ If we don't share same value, we must fix this mapping function.
+ */
+ static ::neurun::model::operand::DataType getDataType(OperandCode type);
+
+ /**
+ * @brief Convert operand type info from NNAPI to internal operand type info
+ * @param[in] type NNAPI's operand type
+ * @return neurun's internal operand type info
+ */
+ static ::neurun::model::operand::TypeInfo getTypeInfo(const ANeuralNetworksOperandType *type);
+
+ /**
+ * @brief Convert operand shape info from NNAPI to internal operand shape
+ * @param[in] type NNAPI's operand type
+ * @return neurun's internal operand shape
+ */
+ static ::neurun::model::operand::Shape getShape(const ANeuralNetworksOperandType *type);
+};
+
+#endif // __NEURUN_NNAPI_CONVERT_H__
*/
#include "execution.h"
-#include "util/NNAPIConvert.h"
+#include "NNAPIConvert.h"
#include "util/logging.h"
const neurun::model::operand::Index
const neurun::model::operand::Index index) noexcept
{
const auto operand_type = _executor->model().operands.at(index).typeInfo();
- const auto typeInfo = ::neurun::util::getTypeInfo(type);
+ const auto typeInfo = NNAPIConvert::getTypeInfo(type);
if (operand_type != typeInfo)
{
const neurun::model::operand::Index index) noexcept
{
const auto operand_shape = _executor->model().operands.at(index).shape();
- const auto shape_from_type = ::neurun::util::getShape(type);
+ const auto shape_from_type = NNAPIConvert::getShape(type);
// Passed shape should be specified
if (shape_from_type.element_nums() == 0)
bool unspecified = haveUnspecifiedDims(operand_index);
const auto type_info = _executor->model().operands.at(operand_index).typeInfo();
- const auto shape = (unspecified ? neurun::util::getShape(type)
+ const auto shape = (unspecified ? NNAPIConvert::getShape(type)
: _executor->model().operands.at(operand_index).shape());
_executor->setInput(input_index, type_info, shape, buffer, length);
bool unspecified = haveUnspecifiedDims(operand_index);
const auto type_info = _executor->model().operands.at(operand_index).typeInfo();
- const auto shape = (unspecified ? neurun::util::getShape(type)
+ const auto shape = (unspecified ? NNAPIConvert::getShape(type)
: _executor->model().operands.at(operand_index).shape());
_executor->setOutput(output_index, type_info, shape, buffer, length);
#include "model.h"
#include "OperationFactory.h"
+#include "NNAPIConvert.h"
#include "model/operation/Node.Include.h"
#include "util/logging.h"
-#include "util/NNAPIConvert.h"
#include "cpp14/memory.h"
{
try
{
- const auto shape = neurun::util::getShape(type);
- const auto typeInfo = neurun::util::getTypeInfo(type);
+ const auto shape = NNAPIConvert::getShape(type);
+ const auto typeInfo = NNAPIConvert::getTypeInfo(type);
_model->addOperand(shape, typeInfo);
}
catch (const std::exception &e)