From b261f186005f2424317c81735fdb92081f87df94 Mon Sep 17 00:00:00 2001
From: "Efimov Alexander/AI Tools Lab/./Samsung Electronics"
Date: Fri, 3 Aug 2018 15:07:53 +0300
Subject: [PATCH] Soft backend: relu operation (#832)

Add implementation of relu operation

Signed-off-by: Efimov Alexander
---
 .../libs/backend/soft/include/cpp_operations.def   |  7 ++++++-
 .../libs/backend/soft/include/cpp_ops/cpp_relu.def | 22 ++++++++++++++++++++++
 2 files changed, 28 insertions(+), 1 deletion(-)

diff --git a/contrib/nnc/libs/backend/soft/include/cpp_operations.def b/contrib/nnc/libs/backend/soft/include/cpp_operations.def
index 38f2239..813a0bc 100644
--- a/contrib/nnc/libs/backend/soft/include/cpp_operations.def
+++ b/contrib/nnc/libs/backend/soft/include/cpp_operations.def
@@ -226,7 +226,12 @@ void biasAdd(Tensor &out, const char *params, const Tensor &in)
 
 void relu(Tensor &out, const char *params, const Tensor &in)
 {
-  // TODO call actual function
+  const float *input = in.getData();
+  Dims<4> input_d = shapeToDims(in.getShape());
+
+  out.reShape(in.getShape());
+
+  Relu(input, input_d, out.getData(), input_d);
 }
 
 void reshape(Tensor &out, const char *params, const Tensor &in)
diff --git a/contrib/nnc/libs/backend/soft/include/cpp_ops/cpp_relu.def b/contrib/nnc/libs/backend/soft/include/cpp_ops/cpp_relu.def
index 8b13789..0082cf7 100644
--- a/contrib/nnc/libs/backend/soft/include/cpp_ops/cpp_relu.def
+++ b/contrib/nnc/libs/backend/soft/include/cpp_ops/cpp_relu.def
@@ -1 +1,23 @@
+/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+inline void Relu(const float* input_data, const Dims<4>& input_dims,
+                 float* output_data, const Dims<4>& output_dims) {
+  const auto input = MapAsVector(input_data, input_dims);
+  auto output = MapAsVector(output_data, output_dims);
+
+  output = input.cwiseMax(0.0f);
+}
 
-- 
2.7.4
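
Note (not part of the patch): MapAsVector here is assumed to be the TensorFlow Lite-style helper that maps a raw float buffer onto an Eigen vector, so the kernel is simply an elementwise max with zero over the flat buffer. A minimal self-contained sketch of the same computation, using Eigen::Map directly and a hypothetical ReluSketch function with an explicit flat size instead of Dims<4>, could look like this:

    // Illustrative sketch only, assuming Eigen is available; ReluSketch, the
    // flat-size parameter, and the sample buffers are hypothetical and not
    // part of the nnc soft backend.
    #include <Eigen/Core>
    #include <iostream>

    inline void ReluSketch(const float* input_data, int size, float* output_data) {
      // Map the raw buffers as Eigen vectors without copying,
      // analogous to MapAsVector in the patch.
      Eigen::Map<const Eigen::VectorXf> input(input_data, size);
      Eigen::Map<Eigen::VectorXf> output(output_data, size);

      // Elementwise max(x, 0.0f), as in output = input.cwiseMax(0.0f) above.
      output = input.cwiseMax(0.0f);
    }

    int main() {
      const float in[4] = {-1.5f, 0.0f, 2.0f, -0.25f};
      float out[4];
      ReluSketch(in, 4, out);
      for (float v : out)
        std::cout << v << ' ';  // prints: 0 0 2 0
      std::cout << '\n';
      return 0;
    }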