[layers] Dump acti_func into header
[platform/core/ml/nntrainer.git] / nntrainer / models / circle_plus / circle_plus.fbs
// Schema namespace for the circle_plus model format.
namespace circle_plus;

// Serialized model files use the ".op" extension.
file_extension "op";
4
// Element type of a Tensor's backing buffer data.
enum TensorType : byte {
    FLOAT32 = 0,    // 32-bit float
    FLOAT16 = 1,    // 16-bit half-precision float
}
9
// Weight/bias initialization strategies.
// NOTE(review): "ZEORS" was a typo for ZEROS and has been renamed. The
// numeric value (0) is unchanged, so serialized data stays wire-compatible;
// only the generated identifier changes — update any code using the old name.
enum InitializerType : byte {
    ZEROS          = 0,  // fixed: was misspelled "ZEORS"
    ONES           = 1,
    LECUN_NORMAL   = 2,
    LECUN_UNIFORM  = 3,
    XAVIER_NORMAL  = 4,
    XAVIER_UNIFORM = 5,
    HE_NORMAL      = 6,
    HE_UNIFORM     = 7,
    NONE           = 8,
}
21
// Activation function applied at a layer's output.
enum ActivationType : byte {
    TANH       = 0,
    SIGMOID    = 1,
    SOFTMAX    = 2,
    RELU       = 3,
    LEAKY_RELU = 4,
    NONE       = 5,
}
30
// Tensor: a typed, named n-dimensional array. The raw bytes live in a
// separate Buffer referenced by `buffer` — presumably an index into a
// model-level buffer list; TODO(review): confirm against the writer.
table Tensor {
    type:TensorType;    // element type of the data
    dim:[int];          // shape, one entry per dimension
    name:string;        // tensor name
    buffer:uint;        // index of the Buffer holding this tensor's data
}
38
// TensorMap: associates a tensor name with its index.
table TensorMap {
    name:string;
    index:uint;
}
44
45
// Buffer: raw backing bytes for tensor data.
table Buffer {
    data:[ubyte];
}
50
51
// ---- Layers ----

// Discriminator for the LayerOptions union.
enum LayerTypes : int32 {
    FULLY_CONNECTED = 0,
}
56
// Per-layer-type option payload; the active member mirrors LayerTypes.
union LayerOptions {
    FullyConnectedOptions,
}
60
// Options specific to a fully-connected (dense) layer.
table FullyConnectedOptions {
    unit:uint;                           // number of output units
    weight_initializer:InitializerType;  // how to initialize the weights
    bias_initializer:InitializerType;    // how to initialize the bias
}
66
// Layers: one node of the network graph.
// NOTE(review): field "weignts" was a typo and has been renamed to
// "weights". Field order (and thus vtable slots) is unchanged, so
// serialized data stays wire-compatible; only the generated accessor
// name changes — update any code using the old name.
table Layers {
    type:LayerTypes;            // selects the active LayerOptions member
    name:string;                // layer name
    options:LayerOptions;       // type-specific configuration
    input_layers:[string];      // names of producing layers
    input_shape:[int];
    activation:ActivationType;  // activation applied to the output
    weights:[Tensor];           // fixed: was misspelled "weignts"
    input_tensors:[Tensor];
    output_tensors:[Tensor];
}
78
79
// ---- Learning-rate scheduling ----

// Discriminator for the LROptions union.
enum LRSchedulerType : int32 {
    CONSTANT    = 0,
    EXPONENTIAL = 1,
    STEP        = 2,
}
86
// Per-scheduler option payload; the active member mirrors LRSchedulerType.
union LROptions {
    ConstantLROptions,
    ExponentialLROptions,
    StepLROptions,
}
92
// Constant learning rate for the whole run.
table ConstantLROptions {
    learning_rate:float;
}
96
// Exponentially decaying learning rate.
// NOTE(review): the original table carried only the initial rate, making
// it indistinguishable from ConstantLROptions. Appended the decay
// parameters an exponential scheduler needs (appending fields is
// backward-compatible FlatBuffers schema evolution) — TODO: confirm exact
// names/types against the nntrainer exponential LR scheduler properties.
table ExponentialLROptions {
    learning_rate:float;  // initial learning rate
    decay_rate:float;     // multiplicative decay factor
    decay_steps:float;    // interval over which decay_rate is applied
}
100
// Step-wise learning rate.
// NOTE(review): this carries only a single rate; a step scheduler normally
// also needs the step boundaries (iterations) and per-step rates — verify
// against the consumer before extending.
table StepLROptions {
    learning_rate:float;
}
104
// LRScheduler: type tag plus the matching options payload.
table LRScheduler {
    type:LRSchedulerType;
    options:LROptions;
}
109
// ---- Optimizer ----

// Discriminator for the OptimizerOptions union.
enum OptimizerType : int32 {
    SGD  = 0,
    ADAM = 1,
}
115
// Per-optimizer option payload; the active member mirrors OptimizerType.
union OptimizerOptions {
    SGDOptimizerOptions,
    AdamOptimizerOptions,
}
120
// SGD has no optimizer-specific options (the learning rate comes from the
// LRScheduler); kept as an empty table so the union stays uniform.
table SGDOptimizerOptions {
}
124
// Adam optimizer hyper-parameters.
// NOTE(review): added FlatBuffers defaults matching nntrainer's Adam
// defaults (beta1=0.9, beta2=0.999, epsilon=1e-7) so an absent field reads
// as the conventional value instead of 0, which is never a sane Adam
// setting. Readers that relied on "absent == 0.0" must be checked —
// TODO confirm before merging.
table AdamOptimizerOptions {
    beta1:float = 0.9;          // 1st-moment (momentum) decay
    beta2:float = 0.999;        // 2nd-moment (variance) decay
    epsilon:float = 0.0000001;  // numerical-stability term
}
130
// Optimizer: type tag plus the matching options payload.
table Optimizer {
    type:OptimizerType;
    options:OptimizerOptions;
}
135
// Discriminator for the LossOptions union.
enum LossType : int32 {
    MSE   = 0,  // mean squared error
    CROSS = 1,  // cross-entropy
}
140
// Per-loss option payload; the active member mirrors LossType.
union LossOptions {
    MSELossOptions,
    CrossLossOptions,
}
145
// MSE loss takes no options; empty table keeps the union uniform.
table MSELossOptions {
}
148
// Cross-entropy loss takes no options; empty table keeps the union uniform.
table CrossLossOptions {
}
151
// Loss: type tag plus the matching options payload.
table Loss {
    type:LossType;
    options:LossOptions;
}
156
// NetworkGraph: an ordered collection of layers.
// NOTE(review): input_tensors/output_tensors here are ints — presumably
// indices into a tensor list, in contrast with Layers where the same field
// names hold Tensor tables; confirm the indexing scheme with the writer.
table NetworkGraph {
    name:string;
    input_tensors:[int];   // graph-input tensor indices (see NOTE)
    output_tensors:[int];  // graph-output tensor indices (see NOTE)
    layers:[Layers];
}
163
164
// Model: top-level object — training hyper-parameters plus the graph(s).
table Model {
    name:string;
    epochs:uint;                          // number of training epochs
    batch_size:uint;                      // samples per training step
    optimizer:Optimizer;
    learning_rate_scheduler:LRScheduler;
    loss:Loss;
    network_graph:[NetworkGraph];
}

root_type Model;