};
//NodeInfo::setProperty() end
-//Source::inputTensorsInfo begin
+//Source::inputData() begin
+// Exception names the native MLPipelineSourceInputData call may legitimately
+// report; anything outside this list is coerced to AbortError by
+// getErrorObjectAndValidate() below.
+var ValidSourceInputDataExceptions = [
+ 'InvalidStateError',
+ 'NotFoundError',
+ 'NotSupportedError',
+ 'AbortError'
+];
+// Source.inputData(data): feeds a TensorsData object into this pipeline
+// source node. Throws WebAPIException (one of
+// ValidSourceInputDataExceptions) when the synchronous native call fails.
+Source.prototype.inputData = function() {
+ var args = validator_.validateArgs(arguments, [
+ {
+ name: 'data',
+ type: types_.PLATFORM_OBJECT,
+ values: TensorsData
+ }
+ ]);
+
+ // The source is identified on the native side by pipeline id + node name;
+ // the tensors data buffer is passed by its manager-assigned id.
+ var nativeArgs = {
+ id: this._pipeline_id,
+ name: this.name,
+ tensorsDataId: args.data._id
+ };
-//Source::inputTensorsInfo end
+ var result = native_.callSync('MLPipelineSourceInputData', nativeArgs);
-//Source::inputData() begin
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObjectAndValidate(
+ result,
+ ValidSourceInputDataExceptions,
+ AbortError
+ );
+ }
+ // NOTE(review): native side reports bare success, so result.result is
+ // presumably undefined here — confirm intended return value with spec.
+ return result.result;
+};
//Source::inputData() end
//Switch::getPadList() begin
REGISTER_METHOD(MLPipelineNodeInfoSetProperty);
REGISTER_METHOD(MLPipelineGetSource);
REGISTER_METHOD(MLPipelineGetInputTensorsInfo);
+ REGISTER_METHOD(MLPipelineSourceInputData);
// Pipeline API end
#undef REGISTER_METHOD
// Source::inputTensorsInfo end
// Source::inputData() begin
+// Crosswalk handler for Source.inputData(): validates the JSON arguments,
+// resolves the TensorsData object by id and forwards it to the pipeline
+// manager. Reports success or a PlatformResult error through |out|.
+void MlInstance::MLPipelineSourceInputData(const picojson::value& args, picojson::object& out) {
+ ScopeLogger("args: [%s]", args.serialize().c_str());
+
+ CHECK_ARGS(args, kId, double, out);
+ CHECK_ARGS(args, kName, std::string, out);
+ CHECK_ARGS(args, kTensorsDataId, double, out);
+ auto pipeline_id = static_cast<int>(args.get(kId).get<double>());
+ auto& source_name = args.get(kName).get<std::string>();
+ auto tensor_data_id = static_cast<int>(args.get(kTensorsDataId).get<double>());
+
+ // Ids arrive from JS as doubles; the manager keys its map by int id.
+ TensorsData* tensors_data = GetTensorsDataManager().GetTensorsData(tensor_data_id);
+
+ if (nullptr == tensors_data) {
+ // A stale/unknown id means the JS-side TensorsData was never registered
+ // or was already disposed — surface this as AbortError.
+ LogAndReportError(PlatformResult(ErrorCode::ABORT_ERR, "Internal Source error"), &out,
+ ("Could not get TensorData handle with given id: %d", tensor_data_id));
+ return;
+ }
+
+ auto ret = pipeline_manager_.SourceInputData(pipeline_id, source_name, tensors_data);
+ if (!ret) {
+ LogAndReportError(ret, &out);
+ return;
+ }
+
+ ReportSuccess(out);
+}
// Source::inputData() end
// Switch::getPadList() begin
// Source::inputTensorsInfo end
// Source::inputData() begin
-
+ void MLPipelineSourceInputData(const picojson::value& args, picojson::object& out);
// Source::inputData() end
// Switch::getPadList() begin
// Source::inputTensorsInfo end
// Source::inputData() begin
+// Looks up the source node registered under |name| in this pipeline and
+// delegates the data push to it. Returns NOT_FOUND_ERR when no source with
+// that name was registered via getSource().
+PlatformResult Pipeline::SourceInputData(const std::string& name, TensorsData* tensors_data) {
+ ScopeLogger();
+
+ auto source_it = sources_.find(name);
+ if (sources_.end() == source_it) {
+ LoggerD("Source [%s] not found", name.c_str());
+ return PlatformResult{ErrorCode::NOT_FOUND_ERR, "Source not found"};
+ }
+ return source_it->second->SourceInputData(tensors_data);
+}
// Source::inputData() end
// Switch::getPadList() begin
#include "ml_pipeline_source.h"
#include "ml_pipeline_switch.h"
#include "ml_pipeline_valve.h"
+#include "ml_tensors_data_manager.h"
using common::PlatformResult;
using namespace extension::ml::pipeline;
// Source::inputTensorsInfo end
// Source::inputData() begin
-
+ PlatformResult SourceInputData(const std::string& name, TensorsData* tensors_data);
// Source::inputData() end
// Switch::getPadList() begin
// Source::inputTensorsInfo end
// Source::inputData() begin
+// Resolves the pipeline with the given |id| and forwards the request to
+// Pipeline::SourceInputData(). Returns NOT_FOUND_ERR for an unknown id
+// (e.g. pipeline already disposed).
+PlatformResult PipelineManager::SourceInputData(int id, const std::string& name,
+ TensorsData* tensors_data) {
+ ScopeLogger();
+
+ auto pipeline_it = pipelines_.find(id);
+ if (pipelines_.end() == pipeline_it) {
+ LoggerD("Pipeline not found: [%d]", id);
+ return PlatformResult{ErrorCode::NOT_FOUND_ERR, "Pipeline not found"};
+ }
+ return pipeline_it->second->SourceInputData(name, tensors_data);
+}
// Source::inputData() end
// Switch::getPadList() begin
#include "common/platform_result.h"
#include "ml_pipeline.h"
+#include "ml_tensors_data_manager.h"
#include "ml_tensors_info_manager.h"
using common::PlatformResult;
// Source::inputTensorsInfo end
// Source::inputData() begin
-
+ PlatformResult SourceInputData(int id, const std::string& name, TensorsData* tensors_data);
// Source::inputData() end
// Switch::getPadList() begin
return PlatformResult{};
}
+// Pushes the buffer held by |tensors_data| into the underlying GStreamer
+// appsrc element. ML_PIPELINE_BUF_POLICY_DO_NOT_FREE keeps ownership of the
+// buffer with the TensorsData manager — the pipeline must not free it.
+PlatformResult Source::SourceInputData(TensorsData* tensors_data) {
+  ScopeLogger();
+
+  // A null TensorsData yields a null handle; the native API then fails with
+  // an error code that is translated to a PlatformResult below.
+  ml_tensors_data_h tensor_data_handle = tensors_data ? tensors_data->Handle() : nullptr;
+
+  auto ret =
+      ml_pipeline_src_input_data(source_, tensor_data_handle, ML_PIPELINE_BUF_POLICY_DO_NOT_FREE);
+  if (ML_ERROR_NONE != ret) {
+    LoggerE("ml_pipeline_src_input_data failed: %d (%s)", ret, get_error_message(ret));
+    // Was "Failed to get tensor info" — a copy-paste from
+    // getInputTensorsInfo(); this function inputs data, so report that.
+    return util::ToPlatformResult(ret, "Failed to input data");
+  }
+
+  return PlatformResult{};
+}
+
} // namespace pipeline
} // namespace ml
} // namespace extension
\ No newline at end of file
#include "common/platform_result.h"
+#include "ml_tensors_data_manager.h"
+
using common::PlatformResult;
namespace extension {
~Source();
PlatformResult getInputTensorsInfo(ml_tensors_info_h* result);
+ PlatformResult SourceInputData(TensorsData* tensors_data);
Source(const Source&) = delete;
Source& operator=(const Source&) = delete;