dim(d),
strides{{1, 2, 3}},
is_contiguous(true),
- data(new float[d.getDataLen()], std::default_delete<float[]>()) {
+ data(d.getDataLen() == 0
+ ? nullptr
+ : std::shared_ptr<float>(new float[d.getDataLen()],
+ std::default_delete<float[]>())) {
+ if (d.getDataLen() == 0) {
+ if (buf != nullptr) {
+ throw std::runtime_error(
+ "Tensor dimension and source buffer size mismatch");
+ }
+ return;
+ }
+
// todo: initialize appropriate strides
if (buf != nullptr) {
float *data = getData();
}
/**
 * @brief  Swap two TensorDim objects member-wise.
 * @param  lhs first TensorDim to swap
 * @param  rhs second TensorDim to swap
 * @note   noexcept: only swaps PODs / fixed-size arrays, nothing can throw.
 */
void TensorDim::swap(TensorDim &lhs, TensorDim &rhs) noexcept {
  /// dim is a fixed-size array, so it is swapped element-wise; std::end
  /// derives the bound from the array itself instead of repeating MAXDIM.
  std::swap_ranges(std::begin(lhs.dim), std::end(lhs.dim),
                   std::begin(rhs.dim));
  std::swap(lhs.len, rhs.len);
  std::swap(lhs.feature_len, rhs.feature_len);
}
"[TensorDim] Trying to assign value of 0 to tensor dim");
}
+ if (len == 0) {
+ for (int i = 0; i < MAXDIM; ++i) {
+ dim[i] = 1;
+ }
+ }
+
dim[idx] = value;
resetLen();
}
}
/**
 * @brief  Compare the last layer's reported loss against a golden value.
 * @param  file path of the file holding the expected scalar loss
 * @note   The golden loss is stored as a single-element (1:1:1:1) tensor.
 */
void matchLoss(const char *file) {
  nntrainer::Tensor loss(1, 1, 1, 1);
  loadFile(file, loss);
  const auto expected = *(loss.getData());
  EXPECT_NEAR(layers.back()->getLoss(), expected, tolerance);
}
EXPECT_NEAR(out_ptr[i], golden[i], tolerance);
}
}
+
+ virtual void prepareLayer() { setInputDim("2:3:5:5"); }
};
/**
 * @brief Pooling2D layer: a fully-specified property string is accepted.
 */
TEST_F(nntrainer_Pooling2DLayer, setProperty_01_p) {
  // fixture helper; asserts success internally
  setProperty("pooling_size=2,2 | stride=1,1 | padding=0,0 | pooling=average");
}
/**
 * @brief Pooling2D layer: an empty pooling_size value must be rejected.
 */
TEST_F(nntrainer_Pooling2DLayer, setProperty_02_n) {
  const int status = layer.setProperty({"pooling_size="});
  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}
}
class nntrainer_FlattenLayer
- : public nntrainer_abstractLayer<nntrainer::FlattenLayer> {};
+ : public nntrainer_abstractLayer<nntrainer::FlattenLayer> {
+protected:
+ virtual void prepareLayer() { setInputDim("1:2:4:4"); }
+};
/**
* @brief Flatten Layer
*/
TEST_F(nntrainer_FlattenLayer, forwarding_01_p) {
- setInputDim("1:2:4:4");
reinitialize(false);
EXPECT_EQ(out.getDim(), nntrainer::TensorDim(1, 1, 1, 32));
* @brief Flatten Layer
*/
TEST_F(nntrainer_FlattenLayer, backwarding_01_p) {
- setInputDim("1:2:4:4");
reinitialize(false);
EXPECT_EQ(out.getDim(), nntrainer::TensorDim(1, 1, 1, 32));
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}
-TEST(nntrainer_ActivationLayer, init_01_1) {
+TEST(nntrainer_ActivationLayer, init_01_n) {
+ nntrainer::ActivationLayer layer;
+ EXPECT_THROW(layer.initialize(false), std::invalid_argument);
+}
+
+TEST(nntrainer_ActivationLayer, init_02_p) {
int status = ML_ERROR_NONE;
nntrainer::ActivationLayer layer;
- status = layer.initialize(false);
+ layer.setInputDimension({1, 1, 1, 1});
+ status = layer.initialize(false);
EXPECT_EQ(status, ML_ERROR_NONE);
}