Rename RELU1 to RELU_N1_TO_1 to indicate that the image of the Op is in between -1 and 1.
author A. Unique TensorFlower <gardener@tensorflow.org>
Sat, 13 Jan 2018 23:28:01 +0000 (15:28 -0800)
committer TensorFlower Gardener <gardener@tensorflow.org>
Sat, 13 Jan 2018 23:31:55 +0000 (15:31 -0800)
PiperOrigin-RevId: 181864303

14 files changed:
tensorflow/contrib/lite/g3doc/tf_ops_compatibility.md
tensorflow/contrib/lite/kernels/activations.cc
tensorflow/contrib/lite/kernels/activations_test.cc
tensorflow/contrib/lite/kernels/add_test.cc
tensorflow/contrib/lite/kernels/mul_test.cc
tensorflow/contrib/lite/kernels/register.cc
tensorflow/contrib/lite/model.cc
tensorflow/contrib/lite/nnapi_delegate.cc
tensorflow/contrib/lite/schema/schema.fbs
tensorflow/contrib/lite/schema/schema_generated.h
tensorflow/contrib/lite/toco/tflite/operator.cc
tensorflow/contrib/lite/toco/tflite/operator_test.cc
tensorflow/contrib/lite/toco/tflite/types.cc
tensorflow/contrib/lite/toco/tflite/types_test.cc

index 9ade04eb8c696d7e0e39a8104e02b6e5feec95eb..8e5e694a5cbe7f908572114db33c8257db6151f0 100644 (file)
@@ -329,18 +329,18 @@ Inputs {
   0: a tensor
 }
 Outputs {
-  0: a tensor equivalent to max(0, min(input, 1)
+  0: a tensor equivalent to max(0, input)
 }
 ```
 
-**RELU1**
+**RELU_N1_TO_1**
 
 ```
 Inputs {
   0: a tensor
 }
 Outputs {
-  0: a tensor equivalent to max(-1, min(input, 6)
+  0: a tensor equivalent to max(-1, min(input, 1))
 }
 ```
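
For reference, the corrected formulas as a minimal standalone sketch (hypothetical helper names; not the actual kernel code):

```cpp
#include <algorithm>

// RELU: max(0, input)
float Relu(float x) { return std::max(0.0f, x); }

// RELU_N1_TO_1: max(-1, min(input, 1)) -- output clamped to [-1, 1],
// which is what the new name spells out.
float ReluN1To1(float x) { return std::max(-1.0f, std::min(x, 1.0f)); }
```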
 
index 7ab60a33e5e2ff61bae5f4c6db85ab9c47a391bc..8ac93bc8c8dcfc66d3822e01b6f9b29a3e49c446 100644 (file)
@@ -349,7 +349,7 @@ TfLiteRegistration* Register_RELU() {
   return &r;
 }
 
-TfLiteRegistration* Register_RELU1() {
+TfLiteRegistration* Register_RELU_N1_TO_1() {
   static TfLiteRegistration r = {/*init=*/nullptr, /*free=*/nullptr,
                                  activations::GenericPrepare,
                                  activations::Relu1Eval};
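
Only the exported registration symbol changes; it still points at the same `GenericPrepare`/`Relu1Eval` pair. As a rough, hypothetical sketch of the shape of such an eval function (simplified float-only tensor access; the real activations.cc differs):

```cpp
#include <algorithm>
// Assumes the TFLite C context types (TfLiteContext, TfLiteNode, ...).

TfLiteStatus Relu1EvalSketch(TfLiteContext* context, TfLiteNode* node) {
  const TfLiteTensor* input = &context->tensors[node->inputs->data[0]];
  TfLiteTensor* output = &context->tensors[node->outputs->data[0]];
  int size = 1;
  for (int d = 0; d < input->dims->size; ++d) size *= input->dims->data[d];
  for (int i = 0; i < size; ++i) {
    // Clamp each element to [-1, 1], per the docs above.
    output->data.f[i] = std::max(-1.0f, std::min(input->data.f[i], 1.0f));
  }
  return kTfLiteOk;
}
```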
index 33ca56e745c043efd12b851af14f273fb273d577..68d49944e51b043b6b82aa1589d22f6ebed37574 100644 (file)
@@ -102,7 +102,7 @@ TEST(FloatActivationsOpTest, Relu) {
 }
 
 TEST(FloatActivationsOpTest, Relu1) {
-  FloatActivationsOpModel m(BuiltinOperator_RELU1,
+  FloatActivationsOpModel m(BuiltinOperator_RELU_N1_TO_1,
                             /*input=*/{TensorType_FLOAT32, {1, 2, 4, 1}});
   m.SetInput({
       0.0, -0.6, 0.2, -0.4,  //
index ddf45bb576755d57d50c9e6e01bf50f15612c56d..306dfc3e803d3df34061767ba9ced032299bfa26 100644 (file)
@@ -77,9 +77,10 @@ TEST(FloatAddOpModel, NoActivation) {
   EXPECT_THAT(m.GetOutput(), ElementsAreArray({-1.9, 0.4, 1.0, 1.3}));
 }
 
-TEST(FloatAddOpModel, ActivationRELU1) {
+TEST(FloatAddOpModel, ActivationRELU_N1_TO_1) {
   FloatAddOpModel m({TensorType_FLOAT32, {1, 2, 2, 1}},
-                    {TensorType_FLOAT32, {}}, ActivationFunctionType_RELU1);
+                    {TensorType_FLOAT32, {}},
+                    ActivationFunctionType_RELU_N1_TO_1);
   m.PopulateTensor<float>(m.input1(), {-2.0, 0.2, 0.7, 0.8});
   m.PopulateTensor<float>(m.input2(), {0.1, 0.2, 0.3, 0.5});
   m.Invoke();
@@ -122,7 +123,7 @@ TEST(QuantizedAddOpModel, QuantizedTestsNoActivation) {
   }
 }
 
-TEST(QuantizedAddOpModel, QuantizedTestsActivationRELU1) {
+TEST(QuantizedAddOpModel, QuantizedTestsActivationRELU_N1_TO_1) {
   float kQuantizedTolerance = GetTolerance(-1.0, 1.0);
   std::vector<std::initializer_list<float>> inputs1 = {{-0.8, 0.2, 0.9, 0.7},
                                                        {-0.8, 0.2, 0.7, 0.3}};
@@ -133,7 +134,7 @@ TEST(QuantizedAddOpModel, QuantizedTestsActivationRELU1) {
   for (int i = 0; i < inputs1.size(); ++i) {
     QuantizedAddOpModel m({TensorType_UINT8, {1, 2, 2, 1}, -1.0, 1.0},
                           {TensorType_UINT8, {}, -1.0, 1.0},
-                          ActivationFunctionType_RELU1);
+                          ActivationFunctionType_RELU_N1_TO_1);
     m.QuantizeAndPopulate<uint8_t>(m.input1(), inputs1[i]);
     m.QuantizeAndPopulate<uint8_t>(m.input2(), inputs2[i]);
     m.Invoke();
index 4255cfe18a043c55f3ce7292afdedb6e988a28a2..8838b300c0af167bf2ffcf944fc7c31d6173f462 100644 (file)
@@ -78,9 +78,10 @@ TEST(FloatMulOpTest, NoActivation) {
               ElementsAreArray(ArrayFloatNear({-0.2, 0.04, 0.21, 0.4})));
 }
 
-TEST(FloatMulOpTest, ActivationRELU1) {
+TEST(FloatMulOpTest, ActivationRELU_N1_TO_1) {
   FloatMulOpModel m({TensorType_FLOAT32, {1, 2, 2, 1}},
-                    {TensorType_FLOAT32, {}}, ActivationFunctionType_RELU1);
+                    {TensorType_FLOAT32, {}},
+                    ActivationFunctionType_RELU_N1_TO_1);
   m.PopulateTensor<float>(m.input1(), {-2.0, 0.2, 0.7, 0.8});
   m.PopulateTensor<float>(m.input2(), {0.1, 0.2, 0.3, 5});
   m.Invoke();
index ecaf4d70429b9e3d8d0fcdeac33e16a89d08c6fc..de14afc54673c3997084422ce42f5caebbe43d9c 100644 (file)
@@ -20,7 +20,7 @@ namespace ops {
 namespace builtin {
 
 TfLiteRegistration* Register_RELU();
-TfLiteRegistration* Register_RELU1();
+TfLiteRegistration* Register_RELU_N1_TO_1();
 TfLiteRegistration* Register_RELU6();
 TfLiteRegistration* Register_TANH();
 TfLiteRegistration* Register_LOGISTIC();
@@ -57,7 +57,7 @@ TfLiteRegistration* Register_MEAN();
 
 BuiltinOpResolver::BuiltinOpResolver() {
   AddBuiltin(BuiltinOperator_RELU, Register_RELU());
-  AddBuiltin(BuiltinOperator_RELU1, Register_RELU1());
+  AddBuiltin(BuiltinOperator_RELU_N1_TO_1, Register_RELU_N1_TO_1());
   AddBuiltin(BuiltinOperator_RELU6, Register_RELU6());
   AddBuiltin(BuiltinOperator_TANH, Register_TANH());
   AddBuiltin(BuiltinOperator_LOGISTIC, Register_LOGISTIC());
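
Resolution at interpreter-build time is unchanged: a model whose opcode table contains value 20 now resolves to the renamed builtin. A hedged usage sketch of the standard flow (`model.tflite` is a placeholder path):

```cpp
#include <memory>

#include "tensorflow/contrib/lite/interpreter.h"
#include "tensorflow/contrib/lite/kernels/register.h"
#include "tensorflow/contrib/lite/model.h"

// Build an interpreter; BuiltinOpResolver now maps opcode 20 to
// Register_RELU_N1_TO_1() instead of the old Register_RELU1() symbol.
auto model = tflite::FlatBufferModel::BuildFromFile("model.tflite");
tflite::ops::builtin::BuiltinOpResolver resolver;
std::unique_ptr<tflite::Interpreter> interpreter;
tflite::InterpreterBuilder(*model, resolver)(&interpreter);
```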
index 0cd6c3e8dd89b287f9d403a111d95fc57c76f1b2..fe2a8bb7233dc8fc7b8dcfffc89684abc7554039 100644 (file)
@@ -230,7 +230,7 @@ void* ParseOpData(const Operator* op, BuiltinOperator op_type,
         return kTfLiteActNone;
       case ActivationFunctionType_RELU:
         return kTfLiteActRelu;
-      case ActivationFunctionType_RELU1:
+      case ActivationFunctionType_RELU_N1_TO_1:
         return kTfLiteActRelu1;
       case ActivationFunctionType_RELU6:
         return kTfLiteActRelu6;
@@ -286,7 +286,7 @@ void* ParseOpData(const Operator* op, BuiltinOperator op_type,
     case BuiltinOperator_TANH:
     case BuiltinOperator_LOGISTIC:
     case BuiltinOperator_RELU:
-    case BuiltinOperator_RELU1:
+    case BuiltinOperator_RELU_N1_TO_1:
     case BuiltinOperator_RELU6:
     case BuiltinOperator_CONCAT_EMBEDDINGS:
       break;
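
On the runtime side, the C-level enum is untouched: the renamed flatbuffer value still parses to `kTfLiteActRelu1`. A sketch of how a fused activation of this kind is typically applied element-wise (illustrative; not code from this change):

```cpp
#include <algorithm>

// Illustrative only: apply a fused activation to a single value, mirroring
// the kTfLiteAct* constants ParseOpData maps to above.
float ApplyFusedActivation(TfLiteFusedActivation act, float x) {
  switch (act) {
    case kTfLiteActRelu:  return std::max(0.0f, x);
    case kTfLiteActRelu1: return std::max(-1.0f, std::min(x, 1.0f));  // RELU_N1_TO_1
    case kTfLiteActRelu6: return std::max(0.0f, std::min(x, 6.0f));
    default:              return x;  // kTfLiteActNone and the rest: pass through
  }
}
```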
index 0be7cd96c9e9b2b9336046a48286757bb0ac49e7..ec42152e5c29a82da8673b61ce2959a1fc698353 100644 (file)
@@ -329,7 +329,7 @@ void AddOpsAndParams(tflite::Interpreter* interpreter,
       case tflite::BuiltinOperator_RESIZE_BILINEAR:
       case tflite::BuiltinOperator_CALL:
       case tflite::BuiltinOperator_SKIP_GRAM:
-      case tflite::BuiltinOperator_RELU1:
+      case tflite::BuiltinOperator_RELU_N1_TO_1:
       case tflite::BuiltinOperator_GATHER:
       case tflite::BuiltinOperator_SPACE_TO_BATCH_ND:
       case tflite::BuiltinOperator_BATCH_TO_SPACE_ND:
index 54ef48f4edb7f771fc6b7a0e763078106e1758e2..0a2c63e2b2c94902fe1d04273c260d612e736262 100644 (file)
@@ -89,7 +89,7 @@ enum BuiltinOperator : byte {
   MAX_POOL_2D = 17,
   MUL = 18,
   RELU = 19,
-  RELU1 = 20,
+  RELU_N1_TO_1 = 20,
   RELU6 = 21,
   RESHAPE = 22,
   RESIZE_BILINEAR = 23,
@@ -149,7 +149,7 @@ enum Padding : byte { SAME, VALID }
 enum ActivationFunctionType : byte {
   NONE = 0,
   RELU = 1,
-  RELU1 = 2,
+  RELU_N1_TO_1 = 2,
   RELU6 = 3,
   TANH = 4,
   SIGN_BIT = 5,
index 0774a216f43fc8053125cc37a29619a619ec1f51..b237a61203d49ffd1469cda1ee03f937a0417177 100755 (executable)
@@ -170,7 +170,7 @@ enum BuiltinOperator {
   BuiltinOperator_MAX_POOL_2D = 17,
   BuiltinOperator_MUL = 18,
   BuiltinOperator_RELU = 19,
-  BuiltinOperator_RELU1 = 20,
+  BuiltinOperator_RELU_N1_TO_1 = 20,
   BuiltinOperator_RELU6 = 21,
   BuiltinOperator_RESHAPE = 22,
   BuiltinOperator_RESIZE_BILINEAR = 23,
@@ -214,7 +214,7 @@ inline BuiltinOperator (&EnumValuesBuiltinOperator())[38] {
       BuiltinOperator_MAX_POOL_2D,
       BuiltinOperator_MUL,
       BuiltinOperator_RELU,
-      BuiltinOperator_RELU1,
+      BuiltinOperator_RELU_N1_TO_1,
       BuiltinOperator_RELU6,
       BuiltinOperator_RESHAPE,
       BuiltinOperator_RESIZE_BILINEAR,
@@ -259,7 +259,7 @@ inline const char **EnumNamesBuiltinOperator() {
                                 "MAX_POOL_2D",
                                 "MUL",
                                 "RELU",
-                                "RELU1",
+                                "RELU_N1_TO_1",
                                 "RELU6",
                                 "RESHAPE",
                                 "RESIZE_BILINEAR",
@@ -888,7 +888,7 @@ inline const char *EnumNamePadding(Padding e) {
 enum ActivationFunctionType {
   ActivationFunctionType_NONE = 0,
   ActivationFunctionType_RELU = 1,
-  ActivationFunctionType_RELU1 = 2,
+  ActivationFunctionType_RELU_N1_TO_1 = 2,
   ActivationFunctionType_RELU6 = 3,
   ActivationFunctionType_TANH = 4,
   ActivationFunctionType_SIGN_BIT = 5,
@@ -898,14 +898,14 @@ enum ActivationFunctionType {
 
 inline ActivationFunctionType (&EnumValuesActivationFunctionType())[6] {
   static ActivationFunctionType values[] = {
-      ActivationFunctionType_NONE,  ActivationFunctionType_RELU,
-      ActivationFunctionType_RELU1, ActivationFunctionType_RELU6,
-      ActivationFunctionType_TANH,  ActivationFunctionType_SIGN_BIT};
+      ActivationFunctionType_NONE,         ActivationFunctionType_RELU,
+      ActivationFunctionType_RELU_N1_TO_1, ActivationFunctionType_RELU6,
+      ActivationFunctionType_TANH,         ActivationFunctionType_SIGN_BIT};
   return values;
 }
 
 inline const char **EnumNamesActivationFunctionType() {
-  static const char *names[] = {"NONE", "RELU",     "RELU1", "RELU6",
+  static const char *names[] = {"NONE", "RELU",     "RELU_N1_TO_1", "RELU6",
                                 "TANH", "SIGN_BIT", nullptr};
   return names;
 }
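
Note that only identifiers and name strings change in the schema and generated header: `RELU_N1_TO_1` keeps numeric value 20 (and the activation type keeps value 2), so models serialized before this change deserialize unchanged. A quick sanity check using the generated helpers (flatc emits `EnumNameBuiltinOperator` alongside the arrays above):

```cpp
#include <cstdio>

#include "tensorflow/contrib/lite/schema/schema_generated.h"

int main() {
  // The rename changes the reported name string, not the wire value.
  std::printf("%s = %d\n",
              tflite::EnumNameBuiltinOperator(tflite::BuiltinOperator_RELU_N1_TO_1),
              static_cast<int>(tflite::BuiltinOperator_RELU_N1_TO_1));  // RELU_N1_TO_1 = 20
  return 0;
}
```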
index d6335b82537462f0c582f91e21b23e6dbff7f481..ae6c716eab55fd13116ff83f30be6150021a5494 100644 (file)
@@ -738,7 +738,7 @@ std::vector<std::unique_ptr<BaseOperator>> BuildOperatorList() {
   ops.emplace_back(
       new SimpleOperator<ReluOperator>("RELU", OperatorType::kRelu));
   ops.emplace_back(
-      new SimpleOperator<Relu1Operator>("RELU1", OperatorType::kRelu1));
+      new SimpleOperator<Relu1Operator>("RELU_N1_TO_1", OperatorType::kRelu1));
   ops.emplace_back(
       new SimpleOperator<Relu6Operator>("RELU6", OperatorType::kRelu6));
   ops.emplace_back(new SimpleOperator<ResizeBilinearOperator>(
index 093144f6ac83f457ec0e0a8627f2367bb21771a6..debce637609cbe6af14e402e5c61418b27030f29 100644 (file)
@@ -102,7 +102,7 @@ TEST_F(OperatorTest, SimpleOperators) {
                                           OperatorType::kDequantize);
   CheckSimpleOperator<FloorOperator>("FLOOR", OperatorType::kFloor);
   CheckSimpleOperator<ReluOperator>("RELU", OperatorType::kRelu);
-  CheckSimpleOperator<Relu1Operator>("RELU1", OperatorType::kRelu1);
+  CheckSimpleOperator<Relu1Operator>("RELU_N1_TO_1", OperatorType::kRelu1);
   CheckSimpleOperator<Relu6Operator>("RELU6", OperatorType::kRelu6);
   CheckSimpleOperator<ResizeBilinearOperator>("RESIZE_BILINEAR",
                                               OperatorType::kResizeBilinear);
index a6fa0237bc08d7d721fa4955c802b1faea69ca75..5cd1675f5424ad0832c89da7b038ae803713b919 100644 (file)
@@ -146,7 +146,7 @@ PaddingType Padding::Deserialize(int padding) {
     case FusedActivationFunctionType::kRelu6:
       return ::tflite::ActivationFunctionType_RELU6;
     case FusedActivationFunctionType::kRelu1:
-      return ::tflite::ActivationFunctionType_RELU1;
+      return ::tflite::ActivationFunctionType_RELU_N1_TO_1;
     default:
       LOG(FATAL) << "Unhandled fused activation function type.";
   }
@@ -161,7 +161,7 @@ FusedActivationFunctionType ActivationFunction::Deserialize(
       return FusedActivationFunctionType::kRelu;
     case ::tflite::ActivationFunctionType_RELU6:
       return FusedActivationFunctionType::kRelu6;
-    case ::tflite::ActivationFunctionType_RELU1:
+    case ::tflite::ActivationFunctionType_RELU_N1_TO_1:
       return FusedActivationFunctionType::kRelu1;
     default:
       LOG(FATAL) << "Unhandled fused activation function type.";
index 174b78f3e632fde8dc6ea0ed83ed7a67fa12c16a..e982081f766762919552c17b0b25a50275ffe19a 100644 (file)
@@ -172,7 +172,7 @@ TEST(ActivationFunction, All) {
                   {FusedActivationFunctionType::kRelu6,
                    ::tflite::ActivationFunctionType_RELU6},
                   {FusedActivationFunctionType::kRelu1,
-                   ::tflite::ActivationFunctionType_RELU1}};
+                   ::tflite::ActivationFunctionType_RELU_N1_TO_1}};
   for (auto x : testdata) {
     EXPECT_EQ(x.second, ActivationFunction::Serialize(x.first));
     EXPECT_EQ(x.first, ActivationFunction::Deserialize(x.second));