elif model_content and not model_path:
self._interpreter = (
_interpreter_wrapper.InterpreterWrapper_CreateWrapperCPPFromBuffer(
- str(model_content), len(model_content)))
+ model_content))
if not self._interpreter:
raise ValueError(
'Failed to create model from {} bytes'.format(len(model_content)))
}
// Factory: builds an InterpreterWrapper from an in-memory serialized TFLite
// FlatBuffer model. Returns nullptr if the buffer cannot be extracted from
// `data` or if FlatBufferModel construction fails; on success the caller
// takes ownership of the returned pointer (see header comment).
// This hunk changes the signature from (const char*, size_t) to a single
// PyObject*, extracting the byte pointer/length via PY_TO_CPPSTRING so the
// Python layer can pass model_content directly instead of str()-converting it.
InterpreterWrapper* InterpreterWrapper::CreateWrapperCPPFromBuffer(
- const char* data, size_t len) {
+ PyObject* data) {
+ char * buf = nullptr;
+ Py_ssize_t length;
// PY_TO_CPPSTRING follows the CPython convention of returning -1 on failure
// (e.g. `data` is not a bytes-like object); bail out with nullptr.
+ if (PY_TO_CPPSTRING(data, &buf, &length) == -1) {
+ return nullptr;
+ }
// NOTE(review): `buf` points into `data`'s internal storage, and
// FlatBufferModel::BuildFromBuffer does not copy the buffer — the Python
// object must outlive the wrapper. Confirm the caller keeps a reference to
// model_content for the wrapper's lifetime, else this dangles.
std::unique_ptr<tflite::FlatBufferModel> model =
- tflite::FlatBufferModel::BuildFromBuffer(data, len);
+ tflite::FlatBufferModel::BuildFromBuffer(buf, length);
return model ? new InterpreterWrapper(std::move(model)) : nullptr;
}
// Factory: loads a serialized model from the file at model_path.
// SWIG caller takes ownership of pointer (presumably returns nullptr on
// load failure, mirroring the buffer factory — body not visible here).
static InterpreterWrapper* CreateWrapperCPPFromFile(const char* model_path);
// SWIG caller takes ownership of pointer.
// Factory: builds the wrapper from in-memory model bytes. This hunk swaps
// the (const char*, size_t) pair for a PyObject* so SWIG passes the Python
// bytes object through; returns nullptr on failure (see the .cc definition).
- static InterpreterWrapper* CreateWrapperCPPFromBuffer(const char* data,
- size_t len);
+ static InterpreterWrapper* CreateWrapperCPPFromBuffer(PyObject* data);
~InterpreterWrapper();
// Allocates tensor storage for the wrapped interpreter; true on success.
bool AllocateTensors();