--- /dev/null
+# Embedding
+
+This application contains an embedding layer example with two example models:
+one with a single input, and another that assumes there are two inputs (using a split layer).
+
+## Example Model Structure
+
+There are two structures ready for the example.
+
+1. Linear Embedding structure
+ (Input: 10:1:1:4 )
+ (Embedding: 10:1:4:8 )
+ (Flatten: 10:1:1:32)
+ (FullyConnected: 10:1:1:1 )
+
+2. Split data and separate embedding structure
+ (Input: 10:1:2:2 )
+ (split: 10:1:1:1, 10:1:1:1 )
+ (Embedding1: 10:1:1:8 ), (Embedding2: 10:1:1:8 )
+ (concat: 10:2:1:8 ),
+ (Flatten: 10:1:1:16),
+ (FullyConnected: 10:1:1:1 )
+
+## How to run a train epoch
+
+Once you have compiled with `meson`, you can run the examples as below. Please file an issue if you have a problem running the example.
+
+```bash
+export ${res}
+$ cd ${build_dir}
+$ meson test app_embedding -v #this is for the first model structure
+$ meson test app_embedding_split -v #this is for the second model structure
+```
+
+Expected output is as below...
+
+```bash
+#1/100 - Training Loss: 0.692463 >> [ Accuracy: 100% - Validation Loss : 0.690833 ]
+...
+#100/100 - Training Loss: 0.535373 >> [ Accuracy: 100% - Validation Loss : 0.53367 ]
+```
*/
std::vector<std::vector<float>> inputVector, outputVector;
nntrainer::NeuralNetwork NN;
- std::shared_ptr<ml::train::Layer> layer;
- std::shared_ptr<ml::train::Layer> layer_fc;
- std::shared_ptr<nntrainer::Layer> layer_;
- std::shared_ptr<nntrainer::Layer> layer_fc_;
- std::string name = "embedding";
- std::string fc_name = "outputlayer";
- nntrainer::Tensor weight;
- nntrainer::Tensor weight_fc;
/**
* @brief Initialize NN with configuration file path
*/
std::cout << "Input dimension: " << NN.getInputDimension()[0];
- NN.getLayer(name.c_str(), &layer);
- NN.getLayer(fc_name.c_str(), &layer_fc);
-
- layer_ = nntrainer::getLayerDevel(layer);
- layer_fc_ = nntrainer::getLayerDevel(layer_fc);
-
- weight = layer_->getWeights()[0].getVariable();
- weight.print(std::cout);
-
} catch (...) {
std::cerr << "Unexpected Error during init" << std::endl;
    return 1;
}
- weight = layer_->getWeights()[0].getVariable();
- weight.print(std::cout);
-
/****** testing with a golden data if any ********/
nntrainer::Tensor golden(1, 1, 15, 8);
nntrainer::Tensor weight_out_fc(1, 1, 32, 1);
loadFile("fc_weight_golden.out", weight_out_fc);
weight_out_fc.print(std::cout);
-
- weight_fc = layer_fc_->getWeights()[0].getVariable();
- weight_fc.print(std::cout);
} catch (...) {
std::cerr << "Warning: during loading golden data\n";
}
nntr_app_resdir / 'Embedding' / 'Embedding.ini',
nntr_app_resdir / 'Embedding' / 'embedding_input.txt']
)
+
+# test split example
+test('app_embedding_split', e, args: ['train',
+ nntr_app_resdir / 'Embedding' / 'Embedding_split.ini',
+ nntr_app_resdir / 'Embedding' / 'embedding_input.txt']
+)
# Model Section : Model
[Model]
-Type = Regression # Network Type : Regression, KNN, NeuralNetwork
-Learning_rate = 0.001 # Learning Rate
+Type = NeuralNetwork
Epochs = 100 # Epochs
-Optimizer = adam # Optimizer : sgd (stochastic gradien decent),
- # adam (Adamtive Moment Estimation)
Loss = cross # Loss function : mse (mean squared error)
# cross ( cross entropy )
-Save_Path = "embedding_model.bin" # model path to save / read
+Save_Path = "embedding_model.bin" # model path to save / read
batch_size = 10 # batch size
+
+[Optimizer]
+Type = adam
beta1 = 0.9
beta2 = 0.999
+Learning_rate = 0.001
epsilon = 1e-7
-# Layer Section : Name
[embedding]
Type = embedding
in_dim = 15
--- /dev/null
+# Model Section : Model
+[Model]
+Type = NeuralNetwork
+Epochs = 100 # Epochs
+Loss = cross # Loss function : mse (mean squared error)
+ # cross ( cross entropy )
+Save_Path = "embedding_model_split.bin" # model path to save / read
+batch_size = 10 # batch size
+
+[Optimizer]
+Type = adam
+beta1 = 0.9
+beta2 = 0.999
+Learning_rate = 0.001
+epsilon = 1e-7
+
+[input]
+Type = input
+input_shape = 1:2:2 # channel:height:width
+
+[split]
+Type = split
+split_dimension = 2 # split by height
+
+[embedding1]
+input_layers = split
+Type = embedding
+in_dim = 15
+out_dim = 8
+in_length = 1
+
+[embedding2]
+input_layers = split
+Type = embedding
+in_dim = 15
+out_dim = 8
+in_length = 1
+
+[concat]
+input_layers = embedding1, embedding2
+Type = concat
+
+[flatten]
+Type = flatten
+
+[outputlayer]
+Type = fully_connected
+input_layers = flatten
+Unit = 1
+Bias_initializer = zeros
+Activation = sigmoid