$(NNTRAINER_ROOT)/nntrainer/tensor/blas_interface.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/layer_node.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/input_layer.cpp \
- $(NNTRAINER_ROOT)/nntrainer/layers/output_layer.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/layers/multiout_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/fc_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/bn_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/loss/loss_layer.cpp \
#include <input_layer.h>
#include <lstm.h>
#include <mse_loss_layer.h>
+#include <multiout_layer.h>
#include <nntrainer_error.h>
-#include <output_layer.h>
#include <parse_util.h>
#include <permute_layer.h>
#include <plugged_layer.h>
ActivationLayer::type, LayerType::LAYER_ACTIVATION);
ac.registerFactory(nntrainer::createLayer<AdditionLayer>, AdditionLayer::type,
LayerType::LAYER_ADDITION);
- ac.registerFactory(nntrainer::createLayer<OutputLayer>, OutputLayer::type,
+ ac.registerFactory(nntrainer::createLayer<MultiOutLayer>, MultiOutLayer::type,
LayerType::LAYER_MULTIOUT);
ac.registerFactory(nntrainer::createLayer<ConcatLayer>, ConcatLayer::type,
LayerType::LAYER_CONCAT);
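With the registration above in place, the layer is still keyed as LayerType::LAYER_MULTIOUT but now resolves to the new class. A minimal sketch of creating it by its registered type string, reusing the createLayerNode helper the graph code below already calls (the exact header providing that helper is an assumption of this sketch):

#include <layer_node.h>
#include <multiout_layer.h>

// Sketch: resolve the renamed layer by its registered type string,
// i.e. MultiOutLayer::type == "multiout".
std::shared_ptr<nntrainer::LayerNode> node =
  nntrainer::createLayerNode(nntrainer::MultiOutLayer::type);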
#include <cross_entropy_softmax_loss_layer.h>
#include <flatten_layer.h>
#include <input_layer.h>
+#include <multiout_layer.h>
#include <network_graph.h>
#include <nntrainer_error.h>
#include <nntrainer_log.h>
-#include <output_layer.h>
#include <parse_util.h>
#include <profiler.h>
#include <rnn.h>
if (in_node->getNumOutputConnections() <= 1)
return ML_ERROR_NONE;
- std::shared_ptr<LayerNode> lnode = createLayerNode(OutputLayer::type);
+ std::shared_ptr<LayerNode> lnode = createLayerNode(MultiOutLayer::type);
graph.ensureName(*lnode, in_node->getName());
lnode->setInputLayers({in_node->getName()});
*/
for (unsigned int i = 0; i < graph.size(); ++i) {
auto const &lnode = LNODE(*(cbegin() + i));
- if (lnode->getType() != OutputLayer::type &&
+ if (lnode->getType() != MultiOutLayer::type &&
lnode->getType() != SplitLayer::type) {
status = realizeMultiOutputType(lnode);
NN_RETURN_STATUS();
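Taken together, the realizer hunks above splice a multiout node behind any node with more than one consumer, so each consumer reads its own output tensor. A schematic before/after, with purely illustrative node names:

// Before realization:  fc0 -> relu0,  fc0 -> sigmoid0   (fc0 fans out twice)
// After realization:   fc0 -> multiout node -> relu0, sigmoid0
// graph.ensureName() assigns the generated node its actual name; the exact
// naming scheme is not shown in this patch.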
'activation_layer.cpp',
'addition_layer.cpp',
'concat_layer.cpp',
'bn_layer.cpp',
'conv2d_layer.cpp',
'fc_layer.cpp',
'flatten_layer.cpp',
'input_layer.cpp',
- 'output_layer.cpp',
+ 'multiout_layer.cpp',
'layer_node.cpp',
'pooling2d_layer.cpp',
'preprocess_flip_layer.cpp',
/**
* Copyright (C) 2020 Jijoong Moon <jijoong.moon@samsung.com>
*
- * @file output_layer.cpp
+ * @file multiout_layer.cpp
* @date 05 Nov 2020
* @see https://github.com/nnstreamer/nntrainer
* @author Jijoong Moon <jijoong.moon@samsung.com>
*/
#include <cstring>
+#include <multiout_layer.h>
#include <nntrainer_error.h>
#include <nntrainer_log.h>
-#include <output_layer.h>
#include <parse_util.h>
#include <util_func.h>
static constexpr size_t SINGLE_INOUT_IDX = 0;
-void OutputLayer::finalize(InitLayerContext &context) {
+void MultiOutLayer::finalize(InitLayerContext &context) {
std::vector<TensorDim> out_dims(context.getNumOutputs());
const TensorDim &in_dim = context.getInputDimensions()[0];
context.setOutputDimensions(out_dims);
}
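The hunk elides how out_dims is populated before setOutputDimensions; since in_dim is the only value read, a plausible fill, stated here as an assumption rather than quoted from the patch, is that every output inherits the input shape:

// Hedged sketch of the elided body: a multiout layer only fans a tensor
// out, so each of the N outputs keeps the input's dimensions.
std::fill(out_dims.begin(), out_dims.end(), in_dim);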
-void OutputLayer::forwarding(RunLayerContext &context, bool training) {
+void MultiOutLayer::forwarding(RunLayerContext &context, bool training) {
const Tensor &input_ = context.getInput(SINGLE_INOUT_IDX);
for (unsigned int idx = 0; idx < context.getNumOutputs(); ++idx) {
context.getOutput(idx).fill(input_);
}
}
-void OutputLayer::calcDerivative(RunLayerContext &context) {
+void MultiOutLayer::calcDerivative(RunLayerContext &context) {
Tensor &ret = context.getOutgoingDerivative(SINGLE_INOUT_IDX);
for (unsigned int idx = 0; idx < context.getNumOutputs(); ++idx) {
if (idx == 0) {
}
}
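The derivative body is likewise elided; the idx == 0 branch suggests a copy-then-accumulate pattern over the incoming derivatives. A hedged sketch, where the getIncomingDerivative accessor name is assumed as the counterpart of the getOutgoingDerivative call above:

// Hedged sketch of the elided accumulation: the first incoming derivative
// initializes ret, the remaining ones are summed into it in place.
if (idx == 0) {
  ret.copy(context.getIncomingDerivative(idx));
} else {
  ret.add_i(context.getIncomingDerivative(idx));
}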
-void OutputLayer::setProperty(const std::vector<std::string> &values) {
+void MultiOutLayer::setProperty(const std::vector<std::string> &values) {
if (!values.empty()) {
-std::string msg = "[FlattenLayer] Unknown Layer Properties count " +
+std::string msg = "[MultiOutLayer] Unknown Layer Properties count " +
std::to_string(values.size());
/**
* Copyright (C) 2020 Jijoong Moon <jijoong.moon@samsung.com>
*
- * @file output_layer.h
+ * @file multiout_layer.h
* @date 05 Nov 2020
* @see https://github.com/nnstreamer/nntrainer
* @author Jijoong Moon <jijoong.moon@samsung.com>
* @todo Support inplace for this layer
*/
-#ifndef __OUTPUT_LAYER_H__
-#define __OUTPUT_LAYER_H__
+#ifndef __MULTIOUT_LAYER_H__
+#define __MULTIOUT_LAYER_H__
#ifdef __cplusplus
#include <layer_devel.h>
namespace nntrainer {
/**
- * @class Output Layer
- * @brief Output Layer
+ * @class Multiout Layer
+ * @brief Multiout Layer
*/
-class OutputLayer : public Layer {
+class MultiOutLayer : public Layer {
public:
/**
- * @brief Constructor of Output Layer
+ * @brief Constructor of Multiout Layer
*/
- OutputLayer() : Layer() {}
+ MultiOutLayer() : Layer() {}
/**
- * @brief Destructor of Output Layer
+ * @brief Destructor of Multiout Layer
*/
- ~OutputLayer() = default;
+ ~MultiOutLayer() = default;
/**
- * @brief Move constructor of OutputLayer.
- * @param[in] OutputLayer &&
+ * @brief Move constructor of MultiOutLayer.
+ * @param[in] MultiOutLayer &&
*/
- OutputLayer(OutputLayer &&rhs) noexcept = default;
+ MultiOutLayer(MultiOutLayer &&rhs) noexcept = default;
/**
* @brief Move assignment operator.
- * @parma[in] rhs OutputLayer to be moved.
+ * @param[in] rhs MultiOutLayer to be moved.
*/
- OutputLayer &operator=(OutputLayer &&rhs) = default;
+ MultiOutLayer &operator=(MultiOutLayer &&rhs) = default;
/**
* @copydoc Layer::finalize(InitLayerContext &context)
/**
* @copydoc Layer::getType()
*/
- const std::string getType() const override { return OutputLayer::type; };
+ const std::string getType() const override { return MultiOutLayer::type; };
inline static const std::string type = "multiout";
};
} // namespace nntrainer
#endif /* __cplusplus */
-#endif /* __OUTPUT_LAYER_H__ */
+#endif /* __MULTIOUT_LAYER_H__ */
if (has_fixed_size) {
{
- auto [last, ins, labels] = generator();
+ auto result = generator();
+ bool last = std::get<0>(result);
EXPECT_TRUE(last);
}
#include <gtest/gtest.h>
#include <layers_common_tests.h>
-#include <output_layer.h>
+#include <multiout_layer.h>
auto semantic_output =
- LayerSemanticsParamType(nntrainer::createLayer<nntrainer::OutputLayer>,
- nntrainer::OutputLayer::type, {}, 0, false);
+ LayerSemanticsParamType(nntrainer::createLayer<nntrainer::MultiOutLayer>,
+ nntrainer::MultiOutLayer::type, {}, 0, false);
INSTANTIATE_TEST_CASE_P(Output, LayerSemantics,
::testing::Values(semantic_output));
/**
* @brief Ini file unittest with backbone
- * @note Output layer name not found, epmty backbone
+ * @note Multiout layer name not found, empty backbone
*/
TEST(nntrainerIniTest, backbone_n_18) {
nntrainer::NeuralNetwork NN;
#include <input_layer.h>
#include <layer.h>
+#include <multiout_layer.h>
#include <neuralnet.h>
-#include <output_layer.h>
#include <weight.h>
#include "nntrainer_test_util.h"
}
if (!next_node.node->supportInPlace() &&
- getNodeType() != nntrainer::OutputLayer::type)
+ getNodeType() != nntrainer::MultiOutLayer::type)
verify(out, expected_output, err_msg + " at output");
}
void NodeWatcher::backward(int iteration, bool verify_deriv, bool verify_grad) {
- if (getNodeType() == nntrainer::OutputLayer::type) {
+ if (getNodeType() == nntrainer::MultiOutLayer::type) {
return;
}
void GraphWatcher::readIteration(std::ifstream &f) {
for (auto &i : nodes) {
- if (i.getNodeType() == nntrainer::OutputLayer::type) {
+ if (i.getNodeType() == nntrainer::MultiOutLayer::type) {
continue;
}