Remove extra property "in_length" from embedding layer.
in_length was used to set the number of inputs expected by
the embedding layer. However, this value must be fixed
based on the input provided by the previous layer.
This patch removes this property and lets this value be inferred
from the given input dimensions.
Signed-off-by: Parichay Kapoor <kparichay@gmail.com>
Type = embedding
in_dim = 6 # in dim must be more than len(set(user ids)) + 1
out_dim = 5
-in_length = 1
[product_embed]
input_layers = split
Type = embedding
in_dim = 6 # in dim must be more than len(set(product ids)) + 1
out_dim = 5
-in_length = 1
[concat]
input_layers = user_embed, product_embed
* - random_translate
* - in_dim : int ( input dimension for embedding layer )
 * - out_dim : int ( output dimension for embedding layer )
- * - in_length : int ( input length for embedding layer )
* - recurrent_activation : string (type) - used only in lstm
* - return_sequences : bool (type) - used only in lstm
* - distribute : bool
TensorDim output_dim = input_dim;
- output_dim.height(in_length);
+ output_dim.height(input_dim.width());
output_dim.width(out_dim);
context.setOutputDimensions({output_dim});
status = setUint(out_dim, value);
throw_status(status);
} break;
- case PropertyType::in_length: {
- status = setUint(in_length, value);
- throw_status(status);
- } break;
default:
LayerImpl::setProperty(type_str, value);
break;
for (unsigned int b = 0; b < input_.batch(); ++b) {
float *in_data = input_.getAddress(b * input_.getDim().getFeatureLen());
- for (unsigned int i = 0; i < in_length; ++i) {
+ for (unsigned int i = 0; i < input_.width(); ++i) {
if (in_data[i] > in_dim) {
throw std::invalid_argument("input word index is greater than in_dim");
}
for (unsigned int b = 0; b < input_.batch(); ++b) {
float *in_data = input_.getAddress(b * input_.getDim().getFeatureLen());
- for (unsigned int i = 0; i < in_length; ++i) {
+ for (unsigned int i = 0; i < input_.width(); ++i) {
// Assume padding is 0 and index always start from 1.
// If in_data[i] - 1 < 0, then it skips.
if (in_data[i] - 1 < 0)
/**
* @brief Constructor of Embedding Layer
*/
- EmbeddingLayer(unsigned int in_dim_ = 0, unsigned int out_dim_ = 0,
- unsigned int in_length_ = 0) :
+ EmbeddingLayer(unsigned int in_dim_ = 0, unsigned int out_dim_ = 0) :
LayerImpl(),
in_dim(in_dim_),
out_dim(out_dim_),
- in_length(in_length_),
weight_idx(0) {}
/**
private:
unsigned int in_dim;
unsigned int out_dim;
- unsigned int in_length;
unsigned int weight_idx;
/**
* 28. random_translate
* 29. in_dim : int ( input dimension for embedding layer )
 * 30. out_dim : int ( output dimension for embedding layer )
- * 31. in_length : int ( input length for embedding layer )
- * 32. recurrent_activation : string (type) - lstm
- * 33. distribute : bool
- * 34. split_dimension : string (type)
- * 35. return_sequences : bool (type) - lstm
- * 36. hidden_state_activation : string (type) - lstm
- * 37. dropout : float (type) - drop out rate
+ * 31. recurrent_activation : string (type) - lstm
+ * 32. distribute : bool
+ * 33. split_dimension : string (type)
+ * 34. return_sequences : bool (type) - lstm
+ * 35. hidden_state_activation : string (type) - lstm
+ * 36. dropout : float (type) - drop out rate
*/
enum class PropertyType {
input_shape = 0,
random_translate = 28,
in_dim = 29,
out_dim = 30,
- in_length = 31,
- recurrent_activation = 32,
- distribute = 33,
- split_dimension = 34,
- return_sequences = 35,
- hidden_state_activation = 36,
- dropout = 37,
+ recurrent_activation = 31,
+ distribute = 32,
+ split_dimension = 33,
+ return_sequences = 34,
+ hidden_state_activation = 35,
+ dropout = 36,
unknown
};
* random_translate = 28
* in_dim = 29
* out_dim = 30
- * in_length = 31
- * recurrent_activation = 32
- * distribute = 33
- * split_dimension = 34
- * return_sequences = 35
- * hidden_state_activation = 36
- * dropout = 37
+ * recurrent_activation = 31
+ * distribute = 32
+ * split_dimension = 33
+ * return_sequences = 34
+ * hidden_state_activation = 35
+ * dropout = 36
*
* InputLayer has 0, 1, 2, 3 properties.
* FullyConnectedLayer has 1, 4, 6, 7, 8, 9 properties.
* Pooling2DLayer has 12, 13, 14, 15 properties.
* BatchNormalizationLayer has 0, 1, 5, 6, 7 properties.
*/
-static std::array<std::string, 39> property_string = {
+static std::array<std::string, 38> property_string = {
"input_shape",
"normalization",
"standardization",
"random_translate",
"in_dim",
"out_dim",
- "in_length",
"recurrent_activation",
"distribute",
"split_dimension",
auto semantic_concat =
LayerSemanticsParamType(nntrainer::createLayer<nntrainer::ConcatLayer>,
- nntrainer::ConcatLayer::type, {}, {}, 0, false);
+ nntrainer::ConcatLayer::type, {}, 0, false);
INSTANTIATE_TEST_CASE_P(Concat, LayerSemantics,
::testing::Values(semantic_concat));
auto semantic_embedding = LayerSemanticsParamType(
nntrainer::createLayer<nntrainer::EmbeddingLayer>,
- nntrainer::EmbeddingLayer::type, {"in_length=1", "out_dim=1", "in_dim=1"}, {},
- 0, false);
+ nntrainer::EmbeddingLayer::type, {"out_dim=1", "in_dim=1"}, 0, false);
INSTANTIATE_TEST_CASE_P(Embedding, LayerSemantics,
::testing::Values(semantic_embedding));
auto semantic_split =
LayerSemanticsParamType(nntrainer::createLayer<nntrainer::SplitLayer>,
- nntrainer::SplitLayer::type, {}, {}, 0, false);
+ nntrainer::SplitLayer::type, {}, 0, false);
INSTANTIATE_TEST_CASE_P(Split, LayerSemantics,
::testing::Values(semantic_split));
virtual void prepareLayer() {
int status = setProperty("in_dim=50 |"
- "out_dim=8 |"
- "in_length=12");
+ "out_dim=8");
EXPECT_EQ(status, ML_ERROR_NONE);
setBatch(3);
}