# nntrainer.ini.in — nntrainer configuration template.
# @VAR@ placeholders are substituted at build time (presumably by the
# build system's configure step — verify against the build scripts).
#######################
# nntrainer conf file #
#######################

# Default plugin paths; the path below is searched and any plugins found
# there are registered when the library is loaded.
[plugins]
# path to search for layer plugins
layer = @PLUGIN_INSTALL_PREFIX@/layers

[swap]
# enable the memory-swap feature
memory_swap = @MEMORY_SWAP@

# path to save the swap file
memory_swap_path = @MEMORY_SWAP_PATH@

# look-ahead window size
memory_swap_lookahead = @MEMORY_SWAP_LOOKAHEAD@