// TensorsData
+var _ValidTensorsDataIds = new Set();
+
+function _CheckIfTensorsDataNotDisposed(id) {
+ if (!_ValidTensorsDataIds.has(id)) {
+ throw new WebAPIException(WebAPIException.ABORT_ERR, 'TensorsData is disposed');
+ }
+}
+
var TensorsData = function(id, tensorsInfoId) {
Object.defineProperties(this, {
count: {
tensorsInfo: {
enumerable: true,
get: function() {
+ _CheckIfTensorsDataNotDisposed(this._id);
return this._tensorsInfo.clone();
}
},
enumerable: false
}
});
+ _ValidTensorsDataIds.add(id);
};
TensorsData.prototype.getTensorRawData = function() {
+ _CheckIfTensorsDataNotDisposed(this._id);
throw new WebAPIException(WebAPIException.ABORT_ERR, 'Not implemented');
};
TensorsData.prototype.setTensorData = function() {
+ _CheckIfTensorsDataNotDisposed(this._id);
throw new WebAPIException(WebAPIException.ABORT_ERR, 'Not implemented');
};
TensorsData.prototype.dispose = function() {
- throw new WebAPIException(WebAPIException.ABORT_ERR, 'Not implemented');
+ if (!_ValidTensorsDataIds.has(this._id)) {
+ privUtils_.log('TensorsData already disposed');
+ return;
+ }
+ var callArgs = {
+ tensorsDataId: this._id
+ };
+
+ var result = native_.callSync('MLTensorsDataDispose', callArgs);
+
+ if (native_.isFailure(result)) {
+ // native dispose failed; the id is kept in _ValidTensorsDataIds
+ return;
+ }
+ _ValidTensorsDataIds['delete'](this._id);
+ // the underlying _tensorsInfo handle is also invalidated, so drop its id from the valid set too
+ _ValidTensorsInfoIds['delete'](this._tensorsInfo._id);
};
// TensorsInfo
REGISTER_METHOD(MLTensorsInfoEquals);
REGISTER_METHOD(MLTensorsInfoDispose);
+ REGISTER_METHOD(MLTensorsDataDispose);
+
// Single API begin
REGISTER_METHOD(MLSingleManagerOpenModel);
// MachineLearningSingle::openModelAsync()
}
ReportSuccess(out);
}
+
+void MlInstance::MLTensorsDataDispose(const picojson::value& args, picojson::object& out) {
+ ScopeLogger("args: %s", args.serialize().c_str());
+ CHECK_ARGS(args, kTensorsDataId, double, out);
+ int tensors_data_id = static_cast<int>(args.get(kTensorsDataId).get<double>());
+
+ TensorsData* tensors_data = GetTensorsDataManager().GetTensorsData(tensors_data_id);
+ if (nullptr == tensors_data) {
+ LogAndReportError(PlatformResult(ErrorCode::ABORT_ERR, "Internal TensorsData error"), &out,
+ ("Could not find TensorsData handle with given id: %d", tensors_data_id));
+ return;
+ }
+ // Dispose underlying tensorsInfo
+ PlatformResult result = GetTensorsInfoManager().DisposeTensorsInfo(tensors_data->TensorsInfoId());
+ if (!result) {
+ LogAndReportError(result, &out);
+ return;
+ }
+
+ result = GetTensorsDataManager().DisposeTensorsData(tensors_data_id);
+ if (!result) {
+ LogAndReportError(result, &out);
+ return;
+ }
+ ReportSuccess(out);
+}
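For reference, a minimal standalone sketch of the payload this handler receives, assuming kTensorsDataId resolves to the "tensorsDataId" key that dispose() sends above (the include path and the constant-to-key mapping are assumptions, not taken from the patch). Numbers crossing the JS bridge arrive as doubles, which is why the handler narrows with static_cast<int>:

    // Hypothetical, standalone illustration of the bridge payload; not part of the patch.
    #include <cassert>
    #include <iostream>
    #include "picojson.h"  // vendored under common/ in webapi-plugins; path may differ

    int main() {
      // Roughly what native_.callSync('MLTensorsDataDispose', {tensorsDataId: this._id}) serializes to:
      picojson::object args_obj;
      args_obj["tensorsDataId"] = picojson::value(42.0);  // JS numbers are sent as doubles
      picojson::value args(args_obj);

      std::cout << args.serialize() << std::endl;  // {"tensorsDataId":42}

      // Mirrors the narrowing done after CHECK_ARGS(args, kTensorsDataId, double, out).
      assert(args.get("tensorsDataId").is<double>());
      int tensors_data_id = static_cast<int>(args.get("tensorsDataId").get<double>());
      assert(42 == tensors_data_id);
      return 0;
    }

CHECK_ARGS is what enforces that shape on the native side before the handle lookup runs.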
// Common ML API end
// Single API begin
void MLTensorsInfoClone(const picojson::value& args, picojson::object& out);
void MLTensorsInfoEquals(const picojson::value& args, picojson::object& out);
void MLTensorsInfoDispose(const picojson::value& args, picojson::object& out);
+
+ void MLTensorsDataDispose(const picojson::value& args, picojson::object& out);
+
TensorsInfoManager tensors_info_manager_;
TensorsDataManager tensors_data_manager_;
// Common ML API end
return nullptr;
}
+PlatformResult TensorsDataManager::DisposeTensorsData(int id) {
+ ScopeLogger("id: %d", id);
+
+ TensorsData* t = GetTensorsData(id);
+
+ return DisposeTensorsData(t);
+}
+
+PlatformResult TensorsDataManager::DisposeTensorsData(TensorsData* t) {
+ ScopeLogger();
+
+ if (nullptr == t) {
+ LoggerE("Could not find TensorsData");
+ return PlatformResult(ErrorCode::ABORT_ERR, "Internal TensorsData error");
+ }
+
+ map_.erase(t->Id());
+
+ return PlatformResult(ErrorCode::NO_ERROR);
+}
+
} // ml
} // extension
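The manager side of the teardown may be easier to read as a self-contained model. The sketch below is a hypothetical analogue, not the actual TensorsDataManager: it only illustrates the pattern used above, i.e. an id-keyed owning map, a by-id overload that delegates to the by-pointer overload, an error result for a missing handle, and an erase that releases the object on success.

    // Simplified, hypothetical analogue of TensorsDataManager::DisposeTensorsData();
    // names and ownership details are assumptions, not the actual webapi-plugins code.
    #include <iostream>
    #include <map>
    #include <memory>

    struct FakeTensorsData {
      explicit FakeTensorsData(int id) : id_(id) {}
      int Id() const { return id_; }
     private:
      int id_;
    };

    class FakeManager {
     public:
      FakeTensorsData* Create(int id) {
        auto inserted = map_.emplace(id, std::make_unique<FakeTensorsData>(id));
        return inserted.first->second.get();
      }

      FakeTensorsData* Get(int id) {
        auto it = map_.find(id);
        return it != map_.end() ? it->second.get() : nullptr;
      }

      // Dispose by id: look the handle up, then delegate to the pointer overload.
      bool Dispose(int id) { return Dispose(Get(id)); }

      // Dispose by pointer: a null handle means "not found / already disposed".
      bool Dispose(FakeTensorsData* t) {
        if (nullptr == t) {
          std::cerr << "Could not find TensorsData" << std::endl;
          return false;  // stands in for PlatformResult(ErrorCode::ABORT_ERR)
        }
        map_.erase(t->Id());  // erasing the entry releases the owned object
        return true;          // stands in for PlatformResult(ErrorCode::NO_ERROR)
      }

     private:
      std::map<int, std::unique_ptr<FakeTensorsData>> map_;
    };

    int main() {
      FakeManager manager;
      manager.Create(1);
      std::cout << manager.Dispose(1) << std::endl;  // 1: first dispose succeeds
      std::cout << manager.Dispose(1) << std::endl;  // 0: second dispose reports an error
      return 0;
    }

In the patch itself, the JS layer's _ValidTensorsDataIds check plays the role of the second-dispose guard, so the native "Could not find TensorsData" path should only be hit if the two sides get out of sync.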
TensorsData* CreateTensorsData(TensorsInfo* tensors_info);
TensorsData* GetTensorsData(int id);
+ PlatformResult DisposeTensorsData(int id);
+ PlatformResult DisposeTensorsData(TensorsData* t);
+
private:
TensorsDataManager(TensorsDataManager const&) = delete;
TensorsDataManager& operator=(TensorsDataManager const&) = delete;