Change-Id: Ia5118125ac42d2011a111326c235d71a8551f4a7
Signed-off-by: Hyunil <hyunil46.park@samsung.com>
Learning_rate = 0.0001 # Learning Rate
Decay_rate = 0.96 # for the decay_rate for the decayed learning rate
Decay_steps = 1000 # decay step for the exponential decayed learning rate
Epochs = 1 # Epoch
Optimizer = adam # Optimizer : sgd (stochastic gradient descent),
                 # adam (Adaptive Moment Estimation)
loss = cross # Cost(loss) function : mse (mean squared error),
             # cross (cross entropy)