This will import the TensorFlow Placeholder node as a TFPlaceholder IR node instead of loco::Pull
- graph input shape setting has moved to the Importer class, as we cannot query a node by name in the Frontend
Signed-off-by: SaeHie Park <saehie.park@samsung.com>
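For context, here is a minimal, self-contained sketch of the index-annotation idea behind this change: the imported placeholder node carries its graph-input index so that a later pass can recreate the loco::Pull wiring. `PlaceholderNode`, `GraphInputIndexAnnot`, `set_index`, and `get_index` below are hypothetical stand-ins for illustration, not moco's actual classes or functions.

```cpp
#include <cassert>
#include <cstdint>
#include <memory>

// Hypothetical stand-in for the annotation attached to a placeholder node
struct GraphInputIndexAnnot
{
  uint32_t index = 0;
};

// Hypothetical stand-in for moco::TFPlaceholder
struct PlaceholderNode
{
  std::unique_ptr<GraphInputIndexAnnot> annot;
};

// Analogous to the moco::index(placeholder_node, graph_input->index()) setter in the diff
void set_index(PlaceholderNode *node, uint32_t index)
{
  node->annot = std::make_unique<GraphInputIndexAnnot>();
  node->annot->index = index;
}

// Analogous to a matching getter a later pass would use to find the graph input
uint32_t get_index(const PlaceholderNode *node)
{
  assert(node->annot != nullptr);
  return node->annot->index;
}

int main()
{
  PlaceholderNode placeholder;
  set_index(&placeholder, 0); // Importer records which graph input this node represents
  assert(get_index(&placeholder) == 0);
  return 0;
}
```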
INFO(frontend) << ">>";
tfoptimizier.optimize(graph.get());
- // Fill graph-level input/output shape
- //
- // ASSUMPTION! All the shapes are known at this point
+// Fill graph-level input/output shape
+//
+// ASSUMPTION! All the shapes are known at this point
+#if 0
for (uint32_t n = 0; n < graph->inputs()->size(); ++n)
{
+ // NOTE Input shape is set inside the Importer
+ // TODO Enable this once the TF dialect supports input queries and we use them here
auto input = graph->inputs()->at(n);
- auto input_node = loco::pull_node(graph.get(), n);
+ auto input_node = ...
assert(input_node != nullptr);
input->shape(stdex::make_unique<loco::TensorShape>(tensor_shape(input_node)));
}
+#endif
for (uint32_t n = 0; n < graph->outputs()->size(); ++n)
{
#include "Transforms.h"
#include "ProgressReporter.h"
+#include <moco/IR/Nodes/TFPlaceholder.h>
+
#include <moco/Log.h>
#include <loco/IR/Verifier.h>
auto graph_input = graph->inputs()->create();
- loco::Pull *pull_node = dynamic_cast<loco::Pull *>(node);
- assert(pull_node != nullptr);
+ auto placeholder_node = dynamic_cast<moco::TFPlaceholder *>(node);
+ assert(placeholder_node != nullptr);
graph_input->name(input.nodeName());
+
+ // Annotate the index that will later be passed to loco::Pull
+ moco::index(placeholder_node, graph_input->index());
+
+ // Use the placeholder's internal shape for the graph_input shape
+ // Currently, the signature has no shape information
+ // TODO Graph input shape setting may move to the Frontend
+ auto tensorshape = moco::tensor_shape(placeholder_node);
+ graph_input->shape(stdex::make_unique<loco::TensorShape>(tensorshape));
+
// This implementation works as "PlaceholderGraphBuilder in Op/PlaceholderGraphBuilder.cpp"
// accepts only TF_FLOAT32 as of now.
//
// TODO Support other types
graph_input->dtype(loco::DataType::FLOAT32);
- loco::link(graph_input, pull_node);
}
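As a sketch of the shape propagation above: the Importer copies the placeholder's tensor shape into the graph input, leaving any unknown dimension unset. `Dim`, `TensorShape`, and `copy_shape` below are small stand-ins written for illustration under that assumption; they are not the real loco classes.

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Stand-ins for loco-style shape types; not the real loco classes
struct Dim
{
  bool known;     // false when the dimension is unknown
  uint32_t value;
};

struct TensorShape
{
  std::vector<Dim> dims;
  uint32_t rank() const { return static_cast<uint32_t>(dims.size()); }
};

// Mirrors graph_input->shape(stdex::make_unique<loco::TensorShape>(tensorshape)):
// known dims are copied from the placeholder, unknown dims stay unset
TensorShape copy_shape(const TensorShape &placeholder_shape)
{
  TensorShape graph_input_shape;
  graph_input_shape.dims.resize(placeholder_shape.rank(), Dim{false, 0});
  for (uint32_t axis = 0; axis < placeholder_shape.rank(); ++axis)
  {
    if (placeholder_shape.dims[axis].known)
      graph_input_shape.dims[axis] = placeholder_shape.dims[axis];
  }
  return graph_input_shape;
}

int main()
{
  TensorShape placeholder_shape;
  placeholder_shape.dims = {{true, 1024}, {true, 2}, {true, 3}, {true, 4}};

  auto graph_input_shape = copy_shape(placeholder_shape);
  assert(graph_input_shape.rank() == 4);
  assert(graph_input_shape.dims[0].known && graph_input_shape.dims[0].value == 1024);
  return 0;
}
```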
/**
#include "Placeholder.h"
+#include <moco/IR/Nodes/TFPlaceholder.h>
+
#include <moco/tf/Names.h>
#include <plier/tf/Convert.h>
// TODO support other types
assert(dtype == loco::DataType::FLOAT32);
- // Create a "pull" node as an input
- auto pull_node = graph->nodes()->create<loco::Pull>();
+ // Create a "Placeholder" node as an input
+ auto placeholder_node = graph->nodes()->create<moco::TFPlaceholder>();
- pull_node->dtype(dtype);
+ placeholder_node->dtype(dtype);
// Setting shape info.
- pull_node->rank(num_dims);
+ placeholder_node->rank(num_dims);
for (int64_t d = 0; d < num_dims; d++)
{
assert(shape.dim(d).size() < std::numeric_limits<uint32_t>::max());
if (dim_value >= 0)
{
uint32_t dim_value32 = static_cast<uint32_t>(dim_value);
- pull_node->dim(d) = dim_value32;
+ placeholder_node->dim(d) = dim_value32;
}
else
{
- pull_node->dim(d).unset();
+ placeholder_node->dim(d).unset();
// TODO Remove assert() and do implement
// NOTE Current implementation assumes dim is all know
assert(false);
// register string-name to node
TensorName output_name(node.name(), 0);
- tensor_names->enroll(output_name, pull_node);
+ tensor_names->enroll(output_name, placeholder_node);
}
} // namespace tf
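The NOTE/TODO above still hits assert(false) when a dimension is unknown (-1 in the TF shape proto). Below is a minimal sketch of how the dim-mapping loop could leave such dimensions unset instead; `Node` and `set_shape` are hypothetical stand-ins, not moco::TFPlaceholder's real interface.

```cpp
#include <cassert>
#include <cstdint>
#include <limits>
#include <vector>

// Hypothetical stand-in for moco::TFPlaceholder's rank/dim interface
struct Node
{
  struct Dim
  {
    bool known = false;
    uint32_t value = 0;
    void set(uint32_t v) { known = true; value = v; }
    void unset() { known = false; }
  };

  std::vector<Dim> dims;
  void rank(uint32_t r) { dims.resize(r); }
  Dim &dim(uint32_t axis) { return dims.at(axis); }
};

// Map TF shape-proto dim sizes onto the node; a size of -1 means "unknown"
// and is recorded as an unset dim instead of aborting
void set_shape(Node &node, const std::vector<int64_t> &tf_dims)
{
  node.rank(static_cast<uint32_t>(tf_dims.size()));
  for (uint32_t d = 0; d < tf_dims.size(); ++d)
  {
    assert(tf_dims[d] < std::numeric_limits<uint32_t>::max());
    if (tf_dims[d] >= 0)
      node.dim(d).set(static_cast<uint32_t>(tf_dims[d]));
    else
      node.dim(d).unset(); // unknown dim stays unknown until shape inference
  }
}

int main()
{
  Node placeholder;
  set_shape(placeholder, {-1, 2, 3, 4}); // unknown batch dimension
  assert(!placeholder.dim(0).known);
  assert(placeholder.dim(1).known && placeholder.dim(1).value == 2);
  return 0;
}
```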
// what to test:
// - TFPlaceholder node should exist
+ // - shape attribute should match
tester.inputs({});
tester.output("placeholder");
tester.run(nodedef, graphbuilder);
-// TODO enable this after Placeholder is built as TFPlaceholder
-#if 0
- auto test_node = dynamic_cast<moco::tf::TFPlaceholder *>(tester.output());
+ auto test_node = dynamic_cast<moco::TFPlaceholder *>(tester.output());
+ ASSERT_NE(test_node, nullptr);
ASSERT_TRUE(test_node->dim(0).known() && test_node->dim(0).value() == 1024);
ASSERT_TRUE(test_node->dim(1).known() && test_node->dim(1).value() == 2);
ASSERT_TRUE(test_node->dim(2).known() && test_node->dim(2).value() == 3);
ASSERT_TRUE(test_node->dim(3).known() && test_node->dim(3).value() == 4);
-#endif
}