From 0bcf788e8bbca2a6aed2a6cb986125694c70b288 Mon Sep 17 00:00:00 2001
From: Jean-Marc Valin <jmvalin@jmvalin.ca>
Date: Thu, 3 Aug 2017 20:12:57 -0400
Subject: [PATCH] RNN C code

---
 src/rnn.c            | 208 +++++++++++++++++++++++++++++++++++++++++++++++++++
 src/rnn.h            |  61 +++++++++++++++
 src/tansig_table.h   |  45 +++++++++++
 training/dump_rnn.py |  67 +++++++++++++++++
 4 files changed, 381 insertions(+)
 create mode 100644 src/rnn.c
 create mode 100644 src/rnn.h
 create mode 100644 src/tansig_table.h
 create mode 100755 training/dump_rnn.py

diff --git a/src/rnn.c b/src/rnn.c
new file mode 100644
index 0000000..6fb8286
--- /dev/null
+++ b/src/rnn.c
@@ -0,0 +1,208 @@
+/* Copyright (c) 2008-2011 Octasic Inc.
+                 2012-2017 Jean-Marc Valin */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
+   CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include "opus_types.h"
+#include "common.h"
+#include "arch.h"
+#include "tansig_table.h"
+#include "rnn.h"
+#include <stdio.h>
+
+static OPUS_INLINE float tansig_approx(float x)
+{
+   int i;
+   float y, dy;
+   float sign=1;
+   /* Tests are reversed to catch NaNs */
+   if (!(x<8))
+      return 1;
+   if (!(x>-8))
+      return -1;
+#ifndef FIXED_POINT
+   /* Another check in case of -ffast-math */
+   if (celt_isnan(x))
+      return 0;
+#endif
+   if (x<0)
+   {
+      x=-x;
+      sign=-1;
+   }
+   i = (int)floor(.5f+25*x);
+   x -= .04f*i;
+   y = tansig_table[i];
+   dy = 1-y*y;
+   y = y + x*dy*(1 - y*x);
+   return sign*y;
+}
+
+static OPUS_INLINE float sigmoid_approx(float x)
+{
+   return .5 + .5*tansig_approx(.5*x);
+}
+
+static OPUS_INLINE float relu(float x)
+{
+   return x < 0 ? 0 : x;
+}
+
+void compute_dense(const DenseLayer *layer, float *output, const float *input)
+{
+   int i, j;
+   int N, M;
+   int stride;
+   M = layer->nb_inputs;
+   N = layer->nb_neurons;
+   stride = N;
+   for (i=0;i<N;i++)
+   {
+      float sum = layer->bias[i];
+      for (j=0;j<M;j++)
+         sum += layer->input_weights[j*stride + i]*input[j];
+      output[i] = WEIGHTS_SCALE*sum;
+   }
+   if (layer->activation == activation_sigmoid) {
+      for (i=0;i<N;i++)
+         output[i] = sigmoid_approx(output[i]);
+   } else if (layer->activation == activation_tanh) {
+      for (i=0;i<N;i++)
+         output[i] = tansig_approx(output[i]);
+   } else if (layer->activation == activation_relu) {
+      for (i=0;i<N;i++)
+         output[i] = relu(output[i]);
+   } else {
+      *(int*)0=0;
+   }
+}
+
+void compute_gru(const GRULayer *gru, float *state, const float *input)
+{
+   int i, j;
+   int N, M;
+   int stride;
+   float z[MAX_NEURONS];
+   float r[MAX_NEURONS];
+   float h[MAX_NEURONS];
+   M = gru->nb_inputs;
+   N = gru->nb_neurons;
+   stride = 3*N;
+   for (i=0;i<N;i++)
+   {
+      /* Compute update gate. */
+      float sum = gru->bias[i];
+      for (j=0;j<M;j++)
+         sum += gru->input_weights[j*stride + i]*input[j];
+      for (j=0;j<N;j++)
+         sum += gru->recurrent_weights[j*stride + i]*state[j];
+      z[i] = sigmoid_approx(WEIGHTS_SCALE*sum);
+   }
+   for (i=0;i<N;i++)
+   {
+      /* Compute reset gate. */
+      float sum = gru->bias[N + i];
+      for (j=0;j<M;j++)
+         sum += gru->input_weights[N + j*stride + i]*input[j];
+      for (j=0;j<N;j++)
+         sum += gru->recurrent_weights[N + j*stride + i]*state[j];
+      r[i] = sigmoid_approx(WEIGHTS_SCALE*sum);
+   }
+   for (i=0;i<N;i++)
+   {
+      /* Compute output. */
+      float sum = gru->bias[2*N + i];
+      for (j=0;j<M;j++)
+         sum += gru->input_weights[2*N + j*stride + i]*input[j];
+      for (j=0;j<N;j++)
+         sum += gru->recurrent_weights[2*N + j*stride + i]*state[j]*r[j];
+      if (gru->activation == activation_sigmoid) sum = sigmoid_approx(WEIGHTS_SCALE*sum);
+      else if (gru->activation == activation_tanh) sum = tansig_approx(WEIGHTS_SCALE*sum);
+      else if (gru->activation == activation_relu) sum = relu(WEIGHTS_SCALE*sum);
+      else *(int*)0=0;
+      h[i] = z[i]*state[i] + (1-z[i])*sum;
+   }
+   for (i=0;i<N;i++)
+      state[i] = h[i];
+}
diff --git a/src/rnn.h b/src/rnn.h
new file mode 100644
--- /dev/null
+++ b/src/rnn.h
@@ -0,0 +1,61 @@
+/* Copyright (c) 2017 Jean-Marc Valin */
+/*
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions
+   are met:
+
+   - Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+   - Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
+   CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef RNN_H_
+#define RNN_H_
+
+#include "opus_types.h"
+
+#define WEIGHTS_SCALE (1.f/256)
+
+#define MAX_NEURONS 128
+
+#define activation_tanh    0
+#define activation_sigmoid 1
+#define activation_relu    2
+
+typedef signed char rnn_weight;
+
+typedef struct {
+  const rnn_weight *bias;
+  const rnn_weight *input_weights;
+  int nb_inputs;
+  int nb_neurons;
+  int activation;
+} DenseLayer;
+
+typedef struct {
+  const rnn_weight *bias;
+  const rnn_weight *input_weights;
+  const rnn_weight *recurrent_weights;
+  int nb_inputs;
+  int nb_neurons;
+  int activation;
+} GRULayer;
+
+void compute_dense(const DenseLayer *layer, float *output, const float *input);
+
+void compute_gru(const GRULayer *gru, float *state, const float *input);
+
+#endif /* RNN_H_ */
diff --git a/src/tansig_table.h b/src/tansig_table.h
new file mode 100644
--- /dev/null
+++ b/src/tansig_table.h
@@ -0,0 +1,45 @@
+/* This file is auto-generated by gen_tables */
+
+static const float tansig_table[201] = {
+0.000000f, 0.039979f, 0.079830f, 0.119427f, 0.158649f,
+0.197375f, 0.235496f, 0.272905f, 0.309507f, 0.345214f,
+/* ... 191 further entries elided; the table holds tanh(0.04*i) for i = 0..200 ... */
+};
diff --git a/training/dump_rnn.py b/training/dump_rnn.py
new file mode 100755
--- /dev/null
+++ b/training/dump_rnn.py
@@ -0,0 +1,67 @@
+#!/usr/bin/python
+
+from __future__ import print_function
+
+from keras.models import Sequential
+from keras.layers import Dense
+from keras.layers import LSTM
+from keras.layers import GRU
+from keras.models import load_model
+from keras import backend as K
+import sys
+import re
+
+import numpy as np
+
+def printVector(f, vector, name):
+    v = np.reshape(vector, (-1))
+    f.write('static const rnn_weight {}[{}] = {{\n   '.format(name, len(v)))
+    for i in range(0, len(v)):
+        f.write('{}'.format(min(127, int(round(256*v[i])))))
+        if (i!=len(v)-1):
+            f.write(',')
+        else:
+            break
+        if (i%8==7):
+            f.write("\n   ")
+        else:
+            f.write(" ")
+    f.write('\n};\n\n')
+
+def printLayer(f, layer):
+    weights = layer.get_weights()
+    printVector(f, weights[0], layer.name + '_weights')
+    if len(weights) > 2:
+        printVector(f, weights[1], layer.name + '_recurrent_weights')
+    printVector(f, weights[-1], layer.name + '_bias')
+    name = layer.name
+    activation = re.search('function (.*) at', str(layer.activation)).group(1)
+    if len(weights) > 2:
+        f.write('const GRULayer {} = {{\n   {}_bias,\n   {}_weights,\n   {}_recurrent_weights,\n   {}, {}, activation_{}\n}};\n\n'
+                .format(name, name, name, name, weights[0].shape[0], weights[0].shape[1]/3, activation))
+    else:
+        f.write('const DenseLayer {} = {{\n   {}_bias,\n   {}_weights,\n   {}, {}, activation_{}\n}};\n\n'
+                .format(name, name, name, name, weights[0].shape[0], weights[0].shape[1], activation))
+
+
+def mean_squared_sqrt_error(y_true, y_pred):
+    return K.mean(K.square(K.sqrt(y_pred) - K.sqrt(y_true)), axis=-1)
+
+
+model = load_model(sys.argv[1], custom_objects={'msse': mean_squared_sqrt_error, 'mean_squared_sqrt_error': mean_squared_sqrt_error, 'my_crossentropy': mean_squared_sqrt_error, 'mycost': mean_squared_sqrt_error})
+
+weights = model.get_weights()
+
+f = open(sys.argv[2], 'w')
+
+f.write('/*This file is automatically generated from a Keras model*/\n\n')
+f.write('#ifdef HAVE_CONFIG_H\n#include "config.h"\n#endif\n\n#include "rnn.h"\n\n')
+
+for i, layer in enumerate(model.layers):
+    if len(layer.get_weights()) > 0:
+        printLayer(f, layer)
+
+
+f.close()
-- 
2.7.4
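
A note on what compute_gru() above implements: it is one time step of a standard GRU. For each neuron it forms an update gate z and a reset gate r from the input and the previous state, builds a candidate state from the input and the reset-scaled previous state, and then interpolates old state and candidate with z. The sketch below is a plain-float reference written for this note, not code from the patch; the names gru_step, W, U and b are invented here, and it drops the patch's byte quantization and WEIGHTS_SCALE so only the recurrence is visible. It keeps the patch's memory layout: the z, r and h weight blocks are interleaved column-major with stride 3*N.

/* Illustration only -- not part of the patch. One unquantized GRU step:
 *   z = sigmoid(Wz.x + Uz.s + bz)          (update gate)
 *   r = sigmoid(Wr.x + Ur.s + br)          (reset gate)
 *   h = tanh(Wh.x + Uh.(r*s) + bh)         (candidate state)
 *   s = z*s + (1-z)*h
 */
#include <math.h>

static float sigmoid(float x) { return 1.f/(1.f + expf(-x)); }

/* M inputs, N neurons; W, U, b store the z block, then r, then h,
   column-major with stride 3*N, matching compute_gru() above. */
static void gru_step(int M, int N, const float *W, const float *U,
                     const float *b, float *state, const float *x)
{
   float z[8], r[8], h[8]; /* assume N <= 8 for this sketch */
   int i, j, stride = 3*N;
   for (i = 0; i < N; i++) {
      float sz = b[i], sr = b[N + i];
      for (j = 0; j < M; j++) {
         sz += W[j*stride + i]*x[j];
         sr += W[N + j*stride + i]*x[j];
      }
      for (j = 0; j < N; j++) {
         sz += U[j*stride + i]*state[j];
         sr += U[N + j*stride + i]*state[j];
      }
      z[i] = sigmoid(sz);
      r[i] = sigmoid(sr);
   }
   for (i = 0; i < N; i++) {
      float sh = b[2*N + i];
      for (j = 0; j < M; j++) sh += W[2*N + j*stride + i]*x[j];
      for (j = 0; j < N; j++) sh += U[2*N + j*stride + i]*state[j]*r[j];
      h[i] = tanhf(sh);
   }
   for (i = 0; i < N; i++)
      state[i] = z[i]*state[i] + (1.f - z[i])*h[i];
}

The patched compute_gru() differs only in that weights are signed bytes, every accumulated sum is multiplied by WEIGHTS_SCALE (1/256) before the nonlinearity, and the tanh/sigmoid come from the tansig_table approximation.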
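To show how the pieces connect: dump_rnn.py quantizes each Keras weight to a signed byte as round(256*w), clamped at 127, and emits the arrays plus a DenseLayer/GRULayer descriptor that rnn.c consumes. Below is a minimal driver sketch under the assumption that the patch is applied and src/rnn.h is on the include path; the toy layer is hand-written in the same shape the script would generate and its weights are made up for illustration, not trained values.

/* Illustration only -- not part of the patch. A hand-written stand-in for
   what dump_rnn.py would emit for a 2-input, 1-neuron sigmoid layer. */
#include <stdio.h>
#include "rnn.h"   /* from this patch */

/* Quantized as round(256*w): 0.25 -> 64, -0.5 -> -128, bias ~0.1 -> 26. */
static const rnn_weight toy_weights[2] = { 64, -128 };
static const rnn_weight toy_bias[1] = { 26 };

static const DenseLayer toy = {
   toy_bias,
   toy_weights,
   2, 1, activation_sigmoid
};

int main(void)
{
   float in[2] = { 1.f, 0.5f };
   float out[1];
   compute_dense(&toy, out, in);
   /* sum = (26 + 64*1.0 - 128*0.5)/256 ~= 0.102; sigmoid(0.102) ~= 0.525 */
   printf("%f\n", out[0]);
   return 0;
}

The same convention explains WEIGHTS_SCALE in rnn.c: the byte weights carry a fixed 2^8 gain, which the C code divides back out once per neuron after the dot product rather than per multiply.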