Add skeleton of NeuralNetworks.cpp
author Hanjoung Lee <hanjoung.lee@samsung.com>
Wed, 21 Mar 2018 05:23:22 +0000 (14:23 +0900)
committer 최형규 / Motion Control Lab (SR) / Senior Engineer / Samsung Electronics <hk0110.choi@samsung.com>
Wed, 21 Mar 2018 06:16:54 +0000 (15:16 +0900)
Import NeuralNetworks.cpp from Android NN. Most of the implementation is
commented out and kept for reference.

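Every entry point in the imported file is a stub that immediately returns 0 (i.e. ANEURALNETWORKS_NO_ERROR) and leaves its output parameters untouched, so callers can already link against the library and exercise the ABI. A minimal sketch of such a smoke test, assuming only what NeuralNetworks.h declares (the file name is hypothetical):

    // smoke_test.cc (hypothetical) -- link against the `runtime` library added below.
    #include "NeuralNetworks.h"

    int main() {
        ANeuralNetworksModel* model = nullptr;
        int rc = ANeuralNetworksModel_create(&model);   // stub: returns 0, model stays nullptr
        rc |= ANeuralNetworksModel_finish(model);       // stub: returns 0
        ANeuralNetworksModel_free(model);               // stub: no-op
        return rc;                                      // 0 == ANEURALNETWORKS_NO_ERROR
    }
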
src/runtime/ref/nn/runtime/CMakeLists.txt
src/runtime/ref/nn/runtime/NeuralNetworks.cpp [new file with mode: 0644]

diff --git a/src/runtime/ref/nn/runtime/CMakeLists.txt b/src/runtime/ref/nn/runtime/CMakeLists.txt
index 27eb2b7..6e1a438 100644
--- a/src/runtime/ref/nn/runtime/CMakeLists.txt
+++ b/src/runtime/ref/nn/runtime/CMakeLists.txt
@@ -1,3 +1,11 @@
+# Library `runtime`
+SET (RUNTIME_SRCS NeuralNetworks.cpp)
+
+add_library(runtime SHARED ${RUNTIME_SRCS})
+target_include_directories(runtime PRIVATE . include)
+
+# Executable `runtime_run` (a dummy runner used for simple testing during the bring-up stage)
+# TODO: Remove this executable later
 file(GLOB SRCS *.cc)
 file(GLOB HDRS *.h)
 
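The `runtime_run` executable referenced in the comment above is not added by this change. A hypothetical bring-up main for it could simply walk the model → compilation → execution chain against the stubbed library and report the accumulated result; a sketch, assuming it is linked against the `runtime` target defined above:

    // main.cc for runtime_run (hypothetical; the current stubs make every call succeed).
    #include "NeuralNetworks.h"
    #include <cstdio>

    int main() {
        ANeuralNetworksModel* model = nullptr;
        ANeuralNetworksCompilation* compilation = nullptr;
        ANeuralNetworksExecution* execution = nullptr;

        int rc = ANeuralNetworksModel_create(&model);
        rc |= ANeuralNetworksModel_finish(model);
        rc |= ANeuralNetworksCompilation_create(model, &compilation);
        rc |= ANeuralNetworksCompilation_finish(compilation);
        rc |= ANeuralNetworksExecution_create(compilation, &execution);

        ANeuralNetworksExecution_free(execution);
        ANeuralNetworksCompilation_free(compilation);
        ANeuralNetworksModel_free(model);

        std::printf("runtime_run: %s\n", rc == ANEURALNETWORKS_NO_ERROR ? "OK" : "FAIL");
        return rc;
    }
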
diff --git a/src/runtime/ref/nn/runtime/NeuralNetworks.cpp b/src/runtime/ref/nn/runtime/NeuralNetworks.cpp
new file mode 100644
index 0000000..8cd594d
--- /dev/null
+++ b/src/runtime/ref/nn/runtime/NeuralNetworks.cpp
@@ -0,0 +1,609 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Contains all the entry points to the C Neural Networks API.
+// We do basic validation of the operands and then call the class
+// that implements the functionality.
+
+#include "NeuralNetworks.h"
+
+// TODO: Include these files once they are available
+#if 0
+#include "Callbacks.h"
+#include "CompilationBuilder.h"
+#include "ExecutionBuilder.h"
+#include "Manager.h"
+#include "Memory.h"
+#include "NeuralNetworksOEM.h"
+#include "ModelBuilder.h"
+
+#include <memory>
+#include <vector>
+#endif
+
+// TODO Uncomment these static asserts once ready
+#if 0
+// Make sure the constants defined in the header files have not changed values.
+// IMPORTANT: When adding new values, update kNumberOfDataTypes or kNumberOfDataTypesOEM
+// in Utils.h.
+static_assert(ANEURALNETWORKS_FLOAT32 == 0, "ANEURALNETWORKS_FLOAT32 has changed");
+static_assert(ANEURALNETWORKS_INT32 == 1, "ANEURALNETWORKS_INT32 has changed");
+static_assert(ANEURALNETWORKS_UINT32 == 2, "ANEURALNETWORKS_UINT32 has changed");
+static_assert(ANEURALNETWORKS_TENSOR_FLOAT32 == 3,
+              "ANEURALNETWORKS_TENSOR_FLOAT32 has changed");
+static_assert(ANEURALNETWORKS_TENSOR_INT32 == 4, "ANEURALNETWORKS_TENSOR_INT32 has changed");
+static_assert(ANEURALNETWORKS_TENSOR_QUANT8_ASYMM == 5,
+              "ANEURALNETWORKS_TENSOR_QUANT8_ASYMM has changed");
+static_assert(ANEURALNETWORKS_OEM_SCALAR == 10000, "ANEURALNETWORKS_OEM_SCALAR has changed");
+static_assert(ANEURALNETWORKS_TENSOR_OEM_BYTE == 10001,
+              "ANEURALNETWORKS_TENSOR_OEM_BYTE has changed");
+
+// IMPORTANT: When adding new values, update kNumberOfOperationTypes or
+// kNumberOfOperationTypesOEM in Utils.h.
+static_assert(ANEURALNETWORKS_ADD == 0, "ANEURALNETWORKS_ADD has changed");
+static_assert(ANEURALNETWORKS_AVERAGE_POOL_2D == 1,
+              "ANEURALNETWORKS_AVERAGE_POOL_2D has changed");
+static_assert(ANEURALNETWORKS_CONCATENATION == 2, "ANEURALNETWORKS_CONCATENATION has changed");
+static_assert(ANEURALNETWORKS_CONV_2D == 3, "ANEURALNETWORKS_CONV_2D has changed");
+static_assert(ANEURALNETWORKS_DEPTHWISE_CONV_2D == 4,
+              "ANEURALNETWORKS_DEPTHWISE_CONV_2D has changed");
+static_assert(ANEURALNETWORKS_DEPTH_TO_SPACE == 5,
+              "ANEURALNETWORKS_DEPTH_TO_SPACE has changed");
+static_assert(ANEURALNETWORKS_DEQUANTIZE == 6, "ANEURALNETWORKS_DEQUANTIZE has changed");
+static_assert(ANEURALNETWORKS_EMBEDDING_LOOKUP == 7,
+              "ANEURALNETWORKS_EMBEDDING_LOOKUP has changed");
+static_assert(ANEURALNETWORKS_FLOOR == 8, "ANEURALNETWORKS_FLOOR has changed");
+static_assert(ANEURALNETWORKS_FULLY_CONNECTED == 9,
+              "ANEURALNETWORKS_FULLY_CONNECTED has changed");
+static_assert(ANEURALNETWORKS_HASHTABLE_LOOKUP == 10,
+              "ANEURALNETWORKS_HASHTABLE_LOOKUP has changed");
+static_assert(ANEURALNETWORKS_L2_NORMALIZATION == 11,
+              "ANEURALNETWORKS_L2_NORMALIZATION has changed");
+static_assert(ANEURALNETWORKS_L2_POOL_2D == 12, "ANEURALNETWORKS_L2_POOL has changed");
+static_assert(ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION == 13,
+              "ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION has changed");
+static_assert(ANEURALNETWORKS_LOGISTIC == 14, "ANEURALNETWORKS_LOGISTIC has changed");
+static_assert(ANEURALNETWORKS_LSH_PROJECTION == 15,
+              "ANEURALNETWORKS_LSH_PROJECTION has changed");
+static_assert(ANEURALNETWORKS_LSTM == 16, "ANEURALNETWORKS_LSTM has changed");
+static_assert(ANEURALNETWORKS_MAX_POOL_2D == 17, "ANEURALNETWORKS_MAX_POOL has changed");
+static_assert(ANEURALNETWORKS_MUL == 18, "ANEURALNETWORKS_MUL has changed");
+static_assert(ANEURALNETWORKS_RELU == 19, "ANEURALNETWORKS_RELU has changed");
+static_assert(ANEURALNETWORKS_RELU1 == 20, "ANEURALNETWORKS_RELU1 has changed");
+static_assert(ANEURALNETWORKS_RELU6 == 21, "ANEURALNETWORKS_RELU6 has changed");
+static_assert(ANEURALNETWORKS_RESHAPE == 22, "ANEURALNETWORKS_RESHAPE has changed");
+static_assert(ANEURALNETWORKS_RESIZE_BILINEAR == 23,
+              "ANEURALNETWORKS_RESIZE_BILINEAR has changed");
+static_assert(ANEURALNETWORKS_RNN == 24, "ANEURALNETWORKS_RNN has changed");
+static_assert(ANEURALNETWORKS_SOFTMAX == 25, "ANEURALNETWORKS_SOFTMAX has changed");
+static_assert(ANEURALNETWORKS_SPACE_TO_DEPTH == 26,
+              "ANEURALNETWORKS_SPACE_TO_DEPTH has changed");
+static_assert(ANEURALNETWORKS_SVDF == 27, "ANEURALNETWORKS_SVDF has changed");
+static_assert(ANEURALNETWORKS_TANH == 28, "ANEURALNETWORKS_TANH has changed");
+static_assert(ANEURALNETWORKS_OEM_OPERATION == 10000,
+              "ANEURALNETWORKS_OEM_OPERATION has changed");
+
+static_assert(ANEURALNETWORKS_FUSED_NONE == 0, "ANEURALNETWORKS_FUSED_NONE has changed");
+static_assert(ANEURALNETWORKS_FUSED_RELU == 1, "ANEURALNETWORKS_FUSED_RELU has changed");
+static_assert(ANEURALNETWORKS_FUSED_RELU1 == 2, "ANEURALNETWORKS_FUSED_RELU1 has changed");
+static_assert(ANEURALNETWORKS_FUSED_RELU6 == 3, "ANEURALNETWORKS_FUSED_RELU6 has changed");
+
+static_assert(ANEURALNETWORKS_PREFER_LOW_POWER == 0,
+              "ANEURALNETWORKS_PREFER_LOW_POWER has changed");
+static_assert(ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER == 1,
+              "ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER has changed");
+static_assert(ANEURALNETWORKS_PREFER_SUSTAINED_SPEED == 2,
+              "ANEURALNETWORKS_PREFER_SUSTAINED_SPEED has changed");
+
+static_assert(ANEURALNETWORKS_NO_ERROR == 0, "ANEURALNETWORKS_NO_ERROR has changed");
+static_assert(ANEURALNETWORKS_OUT_OF_MEMORY == 1, "ANEURALNETWORKS_OUT_OF_MEMORY has changed");
+static_assert(ANEURALNETWORKS_INCOMPLETE == 2, "ANEURALNETWORKS_INCOMPLETE has changed");
+static_assert(ANEURALNETWORKS_UNEXPECTED_NULL == 3,
+              "ANEURALNETWORKS_UNEXPECTED_NULL has changed");
+static_assert(ANEURALNETWORKS_BAD_DATA == 4, "ANEURALNETWORKS_BAD_DATA has changed");
+static_assert(ANEURALNETWORKS_OP_FAILED == 5, "ANEURALNETWORKS_OP_FAILED has changed");
+static_assert(ANEURALNETWORKS_BAD_STATE == 6, "ANEURALNETWORKS_BAD_STATE has changed");
+
+static_assert(ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES == 128,
+              "ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES has changed");
+
+// Make sure that the constants are compatible with the values defined in
+// hardware/interfaces/neuralnetworks/1.0/types.hal.
+static_assert(static_cast<int32_t>(OperandType::OEM) == ANEURALNETWORKS_OEM_SCALAR,
+              "OEM != ANEURALNETWORKS_OEM");
+static_assert(static_cast<int32_t>(OperandType::FLOAT32) == ANEURALNETWORKS_FLOAT32,
+              "FLOAT32 != ANEURALNETWORKS_FLOAT32");
+static_assert(static_cast<int32_t>(OperandType::INT32) == ANEURALNETWORKS_INT32,
+              "INT32 != ANEURALNETWORKS_INT32");
+static_assert(static_cast<int32_t>(OperandType::UINT32) == ANEURALNETWORKS_UINT32,
+              "UINT32 != ANEURALNETWORKS_UINT32");
+static_assert(static_cast<int32_t>(OperandType::TENSOR_OEM_BYTE) == ANEURALNETWORKS_TENSOR_OEM_BYTE,
+              "TENSOR_OEM_BYTE != ANEURALNETWORKS_TENSOR_OEM_BYTE");
+static_assert(static_cast<int32_t>(OperandType::TENSOR_FLOAT32) == ANEURALNETWORKS_TENSOR_FLOAT32,
+              "TENSOR_FLOAT32 != ANEURALNETWORKS_TENSOR_FLOAT32");
+static_assert(static_cast<int32_t>(OperandType::TENSOR_QUANT8_ASYMM) ==
+                          ANEURALNETWORKS_TENSOR_QUANT8_ASYMM,
+              "TENSOR_QUANT8_ASYMM != ANEURALNETWORKS_TENSOR_QUANT8_ASYMM");
+
+static_assert(static_cast<int32_t>(OperationType::ADD) == ANEURALNETWORKS_ADD,
+              "OperationType::ADD != ANEURALNETWORKS_ADD");
+static_assert(static_cast<int32_t>(OperationType::AVERAGE_POOL_2D) ==
+                          ANEURALNETWORKS_AVERAGE_POOL_2D,
+              "OperationType::AVERAGE_POOL_2D != ANEURALNETWORKS_AVERAGE_POOL_2D");
+static_assert(static_cast<int32_t>(OperationType::CONV_2D) == ANEURALNETWORKS_CONV_2D,
+              "OperationType::CONV_2D != ANEURALNETWORKS_CONV_2D");
+static_assert(static_cast<int32_t>(OperationType::DEPTHWISE_CONV_2D) ==
+                          ANEURALNETWORKS_DEPTHWISE_CONV_2D,
+              "OperationType::DEPTHWISE_CONV_2D != ANEURALNETWORKS_DEPTHWISE_CONV_2D");
+static_assert(static_cast<int32_t>(OperationType::DEPTH_TO_SPACE) ==
+                          ANEURALNETWORKS_DEPTH_TO_SPACE,
+              "OperationType::DEPTH_TO_SPACE != ANEURALNETWORKS_DEPTH_TO_SPACE");
+static_assert(static_cast<int32_t>(OperationType::DEQUANTIZE) == ANEURALNETWORKS_DEQUANTIZE,
+              "OperationType::DEQUANTIZE != ANEURALNETWORKS_DEQUANTIZE");
+static_assert(static_cast<int32_t>(OperationType::EMBEDDING_LOOKUP) ==
+                          ANEURALNETWORKS_EMBEDDING_LOOKUP,
+              "OperationType::EMBEDDING_LOOKUP != ANEURALNETWORKS_EMBEDDING_LOOKUP");
+static_assert(static_cast<int32_t>(OperationType::FLOOR) == ANEURALNETWORKS_FLOOR,
+              "OperationType::FLOOR != ANEURALNETWORKS_FLOOR");
+static_assert(static_cast<int32_t>(OperationType::FULLY_CONNECTED) ==
+                          ANEURALNETWORKS_FULLY_CONNECTED,
+              "OperationType::FULLY_CONNECTED != ANEURALNETWORKS_FULLY_CONNECTED");
+static_assert(static_cast<int32_t>(OperationType::HASHTABLE_LOOKUP) ==
+                          ANEURALNETWORKS_HASHTABLE_LOOKUP,
+              "OperationType::HASHTABLE_LOOKUP != ANEURALNETWORKS_HASHTABLE_LOOKUP");
+static_assert(static_cast<int32_t>(OperationType::L2_NORMALIZATION) ==
+                          ANEURALNETWORKS_L2_NORMALIZATION,
+              "OperationType::L2_NORMALIZATION != ANEURALNETWORKS_L2_NORMALIZATION");
+static_assert(static_cast<int32_t>(OperationType::L2_POOL_2D) == ANEURALNETWORKS_L2_POOL_2D,
+              "OperationType::L2_POOL_2D != ANEURALNETWORKS_L2_POOL_2D");
+static_assert(static_cast<int32_t>(OperationType::LOCAL_RESPONSE_NORMALIZATION) ==
+                          ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION,
+              "OperationType::LOCAL_RESPONSE_NORMALIZATION != "
+              "ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION");
+static_assert(static_cast<int32_t>(OperationType::LOGISTIC) == ANEURALNETWORKS_LOGISTIC,
+              "OperationType::LOGISTIC != ANEURALNETWORKS_LOGISTIC");
+static_assert(static_cast<int32_t>(OperationType::LSH_PROJECTION) ==
+                          ANEURALNETWORKS_LSH_PROJECTION,
+              "OperationType::LSH_PROJECTION != ANEURALNETWORKS_LSH_PROJECTION");
+static_assert(static_cast<int32_t>(OperationType::LSTM) == ANEURALNETWORKS_LSTM,
+              "OperationType::LSTM != ANEURALNETWORKS_LSTM");
+static_assert(static_cast<int32_t>(OperationType::MAX_POOL_2D) == ANEURALNETWORKS_MAX_POOL_2D,
+              "OperationType::MAX_POOL_2D != ANEURALNETWORKS_MAX_POOL_2D");
+static_assert(static_cast<int32_t>(OperationType::MUL) == ANEURALNETWORKS_MUL,
+              "OperationType::MUL != ANEURALNETWORKS_MUL");
+static_assert(static_cast<int32_t>(OperationType::RELU) == ANEURALNETWORKS_RELU,
+              "OperationType::RELU != ANEURALNETWORKS_RELU");
+static_assert(static_cast<int32_t>(OperationType::RELU1) == ANEURALNETWORKS_RELU1,
+              "OperationType::RELU1 != ANEURALNETWORKS_RELU1");
+static_assert(static_cast<int32_t>(OperationType::RELU6) == ANEURALNETWORKS_RELU6,
+              "OperationType::RELU6 != ANEURALNETWORKS_RELU6");
+static_assert(static_cast<int32_t>(OperationType::RESHAPE) == ANEURALNETWORKS_RESHAPE,
+              "OperationType::RESHAPE != ANEURALNETWORKS_RESHAPE");
+static_assert(static_cast<int32_t>(OperationType::RESIZE_BILINEAR) ==
+                          ANEURALNETWORKS_RESIZE_BILINEAR,
+              "OperationType::RESIZE_BILINEAR != ANEURALNETWORKS_RESIZE_BILINEAR");
+static_assert(static_cast<int32_t>(OperationType::RNN) == ANEURALNETWORKS_RNN,
+              "OperationType::RNN != ANEURALNETWORKS_RNN");
+static_assert(static_cast<int32_t>(OperationType::SOFTMAX) == ANEURALNETWORKS_SOFTMAX,
+              "OperationType::SOFTMAX != ANEURALNETWORKS_SOFTMAX");
+static_assert(static_cast<int32_t>(OperationType::SPACE_TO_DEPTH) ==
+                          ANEURALNETWORKS_SPACE_TO_DEPTH,
+              "OperationType::SPACE_TO_DEPTH != ANEURALNETWORKS_SPACE_TO_DEPTH");
+static_assert(static_cast<int32_t>(OperationType::SVDF) == ANEURALNETWORKS_SVDF,
+              "OperationType::SVDF != ANEURALNETWORKS_SVDF");
+static_assert(static_cast<int32_t>(OperationType::TANH) == ANEURALNETWORKS_TANH,
+              "OperationType::TANH != ANEURALNETWORKS_TANH");
+
+static_assert(static_cast<int32_t>(FusedActivationFunc::NONE) == ANEURALNETWORKS_FUSED_NONE,
+              "FusedActivationFunc::NONE != ANEURALNETWORKS_FUSED_NONE");
+static_assert(static_cast<int32_t>(FusedActivationFunc::RELU) == ANEURALNETWORKS_FUSED_RELU,
+              "FusedActivationFunc::RELU != ANEURALNETWORKS_FUSED_RELU");
+static_assert(static_cast<int32_t>(FusedActivationFunc::RELU1) == ANEURALNETWORKS_FUSED_RELU1,
+              "FusedActivationFunc::RELU1 != ANEURALNETWORKS_FUSED_RELU1");
+static_assert(static_cast<int32_t>(FusedActivationFunc::RELU6) == ANEURALNETWORKS_FUSED_RELU6,
+              "FusedActivationFunc::RELU6 != ANEURALNETWORKS_FUSED_RELU6");
+
+using android::sp;
+using namespace android::nn;
+#endif
+
+int ANeuralNetworksMemory_createFromFd(size_t size, int prot, int fd, size_t offset,
+                                       ANeuralNetworksMemory** memory) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    *memory = nullptr;
+    std::unique_ptr<MemoryFd> m = std::make_unique<MemoryFd>();
+    if (m == nullptr) {
+        return ANEURALNETWORKS_OUT_OF_MEMORY;
+    }
+    int n = m->set(size, prot, fd, offset);
+    if (n != ANEURALNETWORKS_NO_ERROR) {
+        return n;
+    }
+    *memory = reinterpret_cast<ANeuralNetworksMemory*>(m.release());
+    return ANEURALNETWORKS_NO_ERROR;
+#endif
+}
+
+void ANeuralNetworksMemory_free(ANeuralNetworksMemory* memory) {
+    // Original code for reference
+#if 0
+    // No validation.  Free of nullptr is valid.
+    Memory* m = reinterpret_cast<Memory*>(memory);
+    delete m;
+#endif
+}
+
+int ANeuralNetworksModel_create(ANeuralNetworksModel** model) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    initVLogMask();
+    if (!model) {
+        LOG(ERROR) << "ANeuralNetworksModel_create passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+    ModelBuilder* m = new ModelBuilder();
+    if (m == nullptr) {
+        *model = nullptr;
+        return ANEURALNETWORKS_OUT_OF_MEMORY;
+    }
+    *model = reinterpret_cast<ANeuralNetworksModel*>(m);
+    return ANEURALNETWORKS_NO_ERROR;
+#endif
+}
+
+void ANeuralNetworksModel_free(ANeuralNetworksModel* model) {
+    // Original code for reference
+#if 0
+    // No validation.  Free of nullptr is valid.
+    ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
+    delete m;
+#endif
+}
+
+int ANeuralNetworksModel_finish(ANeuralNetworksModel* model) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!model) {
+        LOG(ERROR) << "ANeuralNetworksModel_finish passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+    ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
+    return m->finish();
+#endif
+}
+
+int ANeuralNetworksModel_addOperand(ANeuralNetworksModel* model,
+                                    const ANeuralNetworksOperandType* type) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!model || !type) {
+        LOG(ERROR) << "ANeuralNetworksModel_addOperand passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+    ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
+    return m->addOperand(*type);
+#endif
+}
+
+int ANeuralNetworksModel_setOperandValue(ANeuralNetworksModel* model, int32_t index,
+                                         const void* buffer, size_t length) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!model || !buffer) {
+        LOG(ERROR) << "ANeuralNetworksModel_setOperandValue passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+    ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
+    return m->setOperandValue(index, buffer, length);
+#endif
+}
+
+int ANeuralNetworksModel_setOperandValueFromMemory(ANeuralNetworksModel* model, int32_t index,
+                                                   const ANeuralNetworksMemory* memory,
+                                                   size_t offset, size_t length) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!model || !memory) {
+        LOG(ERROR) << "ANeuralNetworksModel_setOperandValue passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+    const Memory* mem = reinterpret_cast<const Memory*>(memory);
+    ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
+    return m->setOperandValueFromMemory(index, mem, offset, length);
+#endif
+}
+
+int ANeuralNetworksModel_addOperation(ANeuralNetworksModel* model,
+                                      ANeuralNetworksOperationType type, uint32_t inputCount,
+                                      const uint32_t* inputs, uint32_t outputCount,
+                                      const uint32_t* outputs) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!model || !inputs || !outputs) {
+        LOG(ERROR) << "ANeuralNetworksModel_addOperation passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+    ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
+    return m->addOperation(type, inputCount, inputs, outputCount, outputs);
+#endif
+}
+
+int ANeuralNetworksModel_identifyInputsAndOutputs(ANeuralNetworksModel* model, uint32_t inputCount,
+                                                  const uint32_t* inputs, uint32_t outputCount,
+                                                  const uint32_t* outputs) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!model || !inputs || !outputs) {
+        LOG(ERROR) << ("ANeuralNetworksModel_identifyInputsAndOutputs passed a nullptr");
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+    ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
+    return m->identifyInputsAndOutputs(inputCount, inputs, outputCount, outputs);
+#endif
+}
+
+int ANeuralNetworksCompilation_create(ANeuralNetworksModel* model,
+                                      ANeuralNetworksCompilation** compilation) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!model || !compilation) {
+        LOG(ERROR) << "ANeuralNetworksCompilation_create passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+
+    ModelBuilder* m = reinterpret_cast<ModelBuilder*>(model);
+    CompilationBuilder* c = nullptr;
+    int result = m->createCompilation(&c);
+    *compilation = reinterpret_cast<ANeuralNetworksCompilation*>(c);
+    return result;
+#endif
+}
+
+void ANeuralNetworksCompilation_free(ANeuralNetworksCompilation* compilation) {
+    // Original code for reference
+#if 0
+    // No validation.  Free of nullptr is valid.
+    // TODO specification says that a compilation-in-flight can be deleted
+    CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
+    delete c;
+#endif
+}
+
+int ANeuralNetworksCompilation_setPreference(ANeuralNetworksCompilation* compilation,
+                                             int32_t preference) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!compilation) {
+        LOG(ERROR) << "ANeuralNetworksCompilation_setPreference passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+    CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
+    return c->setPreference(preference);
+#endif
+}
+
+int ANeuralNetworksCompilation_finish(ANeuralNetworksCompilation* compilation) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!compilation) {
+        LOG(ERROR) << "ANeuralNetworksCompilation_finish passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+    CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
+    return c->finish();
+#endif
+}
+
+int ANeuralNetworksExecution_create(ANeuralNetworksCompilation* compilation,
+                                    ANeuralNetworksExecution** execution) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!compilation || !execution) {
+        LOG(ERROR) << "ANeuralNetworksExecution_create passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+
+    CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
+    ExecutionBuilder* r = nullptr;
+    int result = c->createExecution(&r);
+    *execution = reinterpret_cast<ANeuralNetworksExecution*>(r);
+    return result;
+#endif
+}
+
+void ANeuralNetworksExecution_free(ANeuralNetworksExecution* execution) {
+    // Original code for reference
+#if 0
+    // TODO specification says that an execution-in-flight can be deleted
+    // No validation.  Free of nullptr is valid.
+    ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
+    delete r;
+#endif
+}
+
+int ANeuralNetworksExecution_setInput(ANeuralNetworksExecution* execution, int32_t index,
+                                      const ANeuralNetworksOperandType* type, const void* buffer,
+                                      size_t length) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    // TODO: For a non-optional input, also verify that buffer is not null.
+    if (!execution) {
+        LOG(ERROR) << "ANeuralNetworksExecution_setInput passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+    ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
+    return r->setInput(index, type, buffer, length);
+#endif
+}
+
+int ANeuralNetworksExecution_setInputFromMemory(ANeuralNetworksExecution* execution, int32_t index,
+                                                const ANeuralNetworksOperandType* type,
+                                                const ANeuralNetworksMemory* memory, size_t offset,
+                                                size_t length) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!execution || !memory) {
+        LOG(ERROR) << "ANeuralNetworksExecution_setInputFromMemory passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+
+    const Memory* m = reinterpret_cast<const Memory*>(memory);
+    ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
+    return r->setInputFromMemory(index, type, m, offset, length);
+#endif
+}
+
+int ANeuralNetworksExecution_setOutput(ANeuralNetworksExecution* execution, int32_t index,
+                                       const ANeuralNetworksOperandType* type, void* buffer,
+                                       size_t length) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!execution || !buffer) {
+        LOG(ERROR) << "ANeuralNetworksExecution_setOutput passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+    ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
+    return r->setOutput(index, type, buffer, length);
+#endif
+}
+
+int ANeuralNetworksExecution_setOutputFromMemory(ANeuralNetworksExecution* execution, int32_t index,
+                                                 const ANeuralNetworksOperandType* type,
+                                                 const ANeuralNetworksMemory* memory, size_t offset,
+                                                 size_t length) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!execution || !memory) {
+        LOG(ERROR) << "ANeuralNetworksExecution_setOutputFromMemory passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+
+    ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
+    const Memory* m = reinterpret_cast<const Memory*>(memory);
+    return r->setOutputFromMemory(index, type, m, offset, length);
+#endif
+}
+
+int ANeuralNetworksExecution_startCompute(ANeuralNetworksExecution* execution,
+                                          ANeuralNetworksEvent** event) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (!execution || !event) {
+        LOG(ERROR) << "ANeuralNetworksExecution_startCompute passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+    // TODO validate the rest
+
+    ExecutionBuilder* r = reinterpret_cast<ExecutionBuilder*>(execution);
+
+    // Dynamically allocate an sp to wrap an ExecutionCallback, seen in the NN
+    // API as an abstract event object. The sp<ExecutionCallback> object is
+    // returned when the execution has been successfully launched, otherwise a
+    // nullptr is returned. The sp is used for ref-counting purposes. Without
+    // it, the HIDL service could attempt to communicate with a dead callback
+    // object.
+    std::unique_ptr<sp<ExecutionCallback>> e = std::make_unique<sp<ExecutionCallback>>();
+    *event = nullptr;
+
+    int n = r->startCompute(e.get());
+    if (n != ANEURALNETWORKS_NO_ERROR) {
+        return n;
+    }
+    *event = reinterpret_cast<ANeuralNetworksEvent*>(e.release());
+    return ANEURALNETWORKS_NO_ERROR;
+#endif
+}
+
+int ANeuralNetworksEvent_wait(ANeuralNetworksEvent* event) {
+    // Dummy Implementation
+    return 0;
+
+    // Original code for reference
+#if 0
+    if (event == nullptr) {
+        LOG(ERROR) << "ANeuralNetworksEvent_wait passed a nullptr";
+        return ANEURALNETWORKS_UNEXPECTED_NULL;
+    }
+
+    sp<ExecutionCallback>* e = reinterpret_cast<sp<ExecutionCallback>*>(event);
+    (*e)->wait();
+    return ANEURALNETWORKS_NO_ERROR;
+#endif
+}
+
+void ANeuralNetworksEvent_free(ANeuralNetworksEvent* event) {
+    // Original code for reference
+#if 0
+    // No validation.  Free of nullptr is valid.
+    if (event) {
+        sp<ExecutionCallback>* e = reinterpret_cast<sp<ExecutionCallback>*>(event);
+        (*e)->wait();
+        delete e;
+    }
+#endif
+}
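
Once the commented-out implementation is restored, callers are expected to drive the asynchronous path through ANeuralNetworksExecution_startCompute and ANeuralNetworksEvent_wait, as the reference code above shows. A caller-side sketch of that flow (hypothetical helper; it assumes an execution whose inputs and outputs have already been set):

    #include "NeuralNetworks.h"

    // Launches one inference on an already-configured execution and waits for it.
    int RunAndWait(ANeuralNetworksExecution* execution) {
        ANeuralNetworksEvent* event = nullptr;
        int rc = ANeuralNetworksExecution_startCompute(execution, &event);
        if (rc == ANEURALNETWORKS_NO_ERROR) {
            rc = ANeuralNetworksEvent_wait(event);  // blocks until the computation finishes
        }
        ANeuralNetworksEvent_free(event);           // freeing a null event is valid
        return rc;
    }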