* this only acts as a hint to the user,
* and is not used for equality.
*/
- std::string name_hint;
+ String name_hint;
void VisitAttrs(tvm::AttrVisitor* v) { v->Visit("name_hint", &name_hint); }
* \brief The constructor
* \param name_hint The name of the variable.
*/
- TVM_DLL explicit Id(std::string name_hint);
+ TVM_DLL explicit Id(String name_hint);
TVM_DEFINE_OBJECT_REF_METHODS(Id, ObjectRef, IdNode);
};
Type type_annotation;
/*! \return The name hint of the variable */
- const std::string& name_hint() const { return vid->name_hint; }
+ const String& name_hint() const { return vid->name_hint; }
void VisitAttrs(tvm::AttrVisitor* v) {
v->Visit("vid", &vid);
hash_reduce.FreeVarHashImpl(this);
}
- TVM_DLL static Var make(std::string name_hint, Type type_annotation);
+ TVM_DLL static Var make(String name_hint, Type type_annotation);
TVM_DLL static Var make(Id vid, Type type_annotation);
* \param name_hint The name hint of a variable.
* \param type_annotation The type annotation of a variable.
*/
- TVM_DLL Var(std::string name_hint, Type type_annotation) : Var(Id(name_hint), type_annotation) {}
+ TVM_DLL Var(String name_hint, Type type_annotation) : Var(Id(name_hint), type_annotation) {}
/*!
* \brief The constructor
* \param name Name of the implementation
* \param plevel Priority level of the implementation
*/
- TVM_DLL void AddImplementation(FTVMCompute fcompute, FTVMSchedule fschedule, std::string name,
+ TVM_DLL void AddImplementation(FTVMCompute fcompute, FTVMSchedule fschedule, String name,
int plevel);
TVM_DEFINE_MUTABLE_OBJECT_REF_METHODS(OpSpecialization, ObjectRef, OpSpecializationNode);
* \param name Name of the implementation
* \param plevel Priority level of the implementation
*/
- TVM_DLL void AddImplementation(FTVMCompute fcompute, FTVMSchedule fschedule, std::string name,
+ TVM_DLL void AddImplementation(FTVMCompute fcompute, FTVMSchedule fschedule, String name,
int plevel);
TVM_DEFINE_MUTABLE_OBJECT_REF_METHODS(OpStrategy, ObjectRef, OpStrategyNode);
*/
TVM_DLL Pass CreateFunctionPass(
const runtime::TypedPackedFunc<Function(Function, IRModule, PassContext)>& pass_func,
- int opt_level, const std::string& name, const tvm::Array<runtime::String>& required);
+ int opt_level, const String& name, const tvm::Array<runtime::String>& required);
/*! \brief Remove expressions which do not affect the program result.
*
*
* \return The pass.
*/
-TVM_DLL Pass Legalize(const std::string& legalize_map_attr_name = "FTVMLegalize");
+TVM_DLL Pass Legalize(const String& legalize_map_attr_name = "FTVMLegalize");
/*!
* \brief Canonicalize cast expressions to make operator fusion more efficient.
* an Expr consumed by multiple callers.
* \return The rewritten expression.
*/
-TVM_DLL Expr ForwardRewrite(const Expr& expr, const std::string& rewrite_map_attr_name,
+TVM_DLL Expr ForwardRewrite(const Expr& expr, const String& rewrite_map_attr_name,
std::function<ObjectRef(const Call&)> fcontext = nullptr,
std::function<Expr(const Expr&)> fmulti_ref_trigger = nullptr);
"EnvFunc": _update_global_key,
"relay.Op": _update_global_key,
"relay.TypeVar": [_ftype_var, _update_from_std_str("name_hint")],
+ "relay.Id": [_update_from_std_str("name_hint")],
"relay.GlobalTypeVar": [_ftype_var, _update_from_std_str("name_hint")],
"relay.Type": _rename("Type"),
"relay.TupleType": _rename("TupleType"),
@property
def name_hint(self):
"""Get name hint of the current var."""
- name = self.vid.name_hint
+ name = str(self.vid.name_hint)
return name
auto symbol_name = src_func->GetAttr<String>(tvm::attr::kGlobalSymbol);
CHECK(symbol_name.defined()) << "No external symbol is set for:\n"
<< AsText(src_func, false);
- auto gv = GlobalVar(std::string(symbol_name.value()));
+ auto gv = GlobalVar(symbol_name.value());
// No need to keep compiler attribute at this point, functions have been
// extracted for specific codegen.
src_func = WithAttr(std::move(src_func), attr::kCompiler, NullValue<ObjectRef>());
TVM_REGISTER_NODE_TYPE(IdNode);
-Id::Id(std::string name_hint) {
+Id::Id(String name_hint) {
ObjectPtr<IdNode> n = make_object<IdNode>();
n->name_hint = std::move(name_hint);
data_ = std::move(n);
TVM_REGISTER_NODE_TYPE(VarNode);
-TVM_REGISTER_GLOBAL("relay.ir.Var").set_body_typed([](std::string str, Type type_annotation) {
+TVM_REGISTER_GLOBAL("relay.ir.Var").set_body_typed([](String str, Type type_annotation) {
return Var(str, type_annotation);
});
}
void OpSpecialization::AddImplementation(tvm::relay::FTVMCompute fcompute,
- tvm::relay::FTVMSchedule fschedule, std::string name,
+ tvm::relay::FTVMSchedule fschedule, String name,
int plevel) {
auto n = make_object<OpImplementationNode>();
n->fcompute = fcompute;
(*this)->implementations.push_back(OpImplementation(n));
}
-void OpStrategy::AddImplementation(FTVMCompute fcompute, FTVMSchedule fschedule, std::string name,
+void OpStrategy::AddImplementation(FTVMCompute fcompute, FTVMSchedule fschedule, String name,
int plevel) {
auto curr_cond = te::SpecializedCondition::Current();
auto self = this->operator->();
Pass CreateFunctionPass(
const runtime::TypedPackedFunc<Function(Function, IRModule, PassContext)>& pass_func,
- int opt_level, const std::string& name, const tvm::Array<runtime::String>& required) {
+ int opt_level, const String& name, const tvm::Array<runtime::String>& required) {
PassInfo pass_info = PassInfo(opt_level, name, required);
return FunctionPass(pass_func, pass_info);
}
return true;
}
-Expr MakeTopK(Expr data, int k, int axis, std::string ret_type, bool is_ascend, DataType dtype) {
+Expr MakeTopK(Expr data, int k, int axis, String ret_type, bool is_ascend, DataType dtype) {
auto attrs = make_object<TopKAttrs>();
attrs->k = k;
attrs->axis = axis;
});
TVM_REGISTER_GLOBAL("relay.op.annotation._make.compiler_begin")
- .set_body_typed([](Expr expr, std::string compiler) {
+ .set_body_typed([](Expr expr, String compiler) {
auto attrs = make_object<CompilerAttrs>();
attrs->compiler = compiler;
static const Op& op = Op::Get("annotation.compiler_begin");
});
TVM_REGISTER_GLOBAL("relay.op.annotation._make.compiler_end")
- .set_body_typed([](Expr expr, std::string compiler) {
+ .set_body_typed([](Expr expr, String compiler) {
auto attrs = make_object<CompilerAttrs>();
attrs->compiler = compiler;
static const Op& op = Op::Get("annotation.compiler_end");
.set_attr<TOpPattern>("TOpPattern", kOpaque)
.set_attr<FTVMCompute>("FTVMCompute", DebugCompute);
-Expr MakeDebug(Expr expr, std::string name) {
+Expr MakeDebug(Expr expr, String name) {
auto dattrs = make_object<DebugAttrs>();
if (name.size() > 0) {
dattrs->debug_func = EnvFunc::Get(name);
// Positional relay function to create dilation2d operator
// used by frontend FFI.
Expr MakeDilation2D(Expr data, Expr weight, Array<IndexExpr> strides, Array<IndexExpr> padding,
- Array<IndexExpr> dilations, std::string data_layout, std::string kernel_layout,
+ Array<IndexExpr> dilations, String data_layout, String kernel_layout,
DataType out_dtype) {
auto attrs = make_object<Dilation2DAttrs>();
attrs->strides = std::move(strides);
// Positional relay function to create image operator
// used by frontend FFI.
-Expr MakeResize(Expr data, Array<IndexExpr> size, std::string layout, std::string method,
- std::string coordinate_transformation_mode, DataType out_dtype) {
+Expr MakeResize(Expr data, Array<IndexExpr> size, String layout, String method,
+ String coordinate_transformation_mode, DataType out_dtype) {
auto attrs = make_object<ResizeAttrs>();
attrs->size = std::move(size);
attrs->layout = std::move(layout);
}
Expr MakeCropAndResize(Expr data, Expr boxes, Expr box_indices, Array<IndexExpr> crop_size,
- std::string layout, std::string method, double extrapolation_value,
+ String layout, String method, double extrapolation_value,
DataType out_dtype) {
auto attrs = make_object<CropAndResizeAttrs>();
attrs->crop_size = std::move(crop_size);
}
Expr MakeBitPack(Expr data, int bits, int pack_axis, int bit_axis, DataType pack_type,
- std::string name) {
+ String name) {
auto attrs = make_object<BitPackAttrs>();
attrs->bits = bits;
attrs->pack_axis = pack_axis;
// used by frontend FFI.
Expr MakeBinaryConv2D(Expr data, Expr weight, Array<IndexExpr> strides, Array<IndexExpr> padding,
IndexExpr channels, Array<IndexExpr> kernel_size, int activation_bits,
- int weight_bits, std::string data_layout, std::string kernel_layout,
+ int weight_bits, String data_layout, String kernel_layout,
DataType pack_dtype, DataType out_dtype, bool unipolar) {
auto attrs = make_object<BinaryConv2DAttrs>();
attrs->strides = std::move(strides);
TVM_REGISTER_GLOBAL("relay.op.nn._make.conv1d")
.set_body_typed([](Expr data, Expr weight, Array<IndexExpr> strides, Array<IndexExpr> padding,
Array<IndexExpr> dilation, int groups, IndexExpr channels,
- Array<IndexExpr> kernel_size, std::string data_layout,
- std::string kernel_layout, std::string out_layout, DataType out_dtype) {
+ Array<IndexExpr> kernel_size, String data_layout, String kernel_layout,
+ String out_layout, DataType out_dtype) {
return MakeConv<Conv1DAttrs>(data, weight, strides, padding, dilation, groups, channels,
kernel_size, data_layout, kernel_layout, out_layout, out_dtype,
"nn.conv1d");
TVM_REGISTER_GLOBAL("relay.op.nn._make.conv2d")
.set_body_typed([](Expr data, Expr weight, Array<IndexExpr> strides, Array<IndexExpr> padding,
Array<IndexExpr> dilation, int groups, IndexExpr channels,
- Array<IndexExpr> kernel_size, std::string data_layout,
- std::string kernel_layout, std::string out_layout, DataType out_dtype) {
+ Array<IndexExpr> kernel_size, String data_layout, String kernel_layout,
+ String out_layout, DataType out_dtype) {
return MakeConv<Conv2DAttrs>(data, weight, strides, padding, dilation, groups, channels,
kernel_size, data_layout, kernel_layout, out_layout, out_dtype,
"nn.conv2d");
TVM_REGISTER_GLOBAL("relay.op.nn._make.conv3d")
.set_body_typed([](Expr data, Expr weight, Array<IndexExpr> strides, Array<IndexExpr> padding,
Array<IndexExpr> dilation, int groups, IndexExpr channels,
- Array<IndexExpr> kernel_size, std::string data_layout,
- std::string kernel_layout, std::string out_layout, DataType out_dtype) {
+ Array<IndexExpr> kernel_size, String data_layout, String kernel_layout,
+ String out_layout, DataType out_dtype) {
return MakeConv<Conv3DAttrs>(data, weight, strides, padding, dilation, groups, channels,
kernel_size, data_layout, kernel_layout, out_layout, out_dtype,
"nn.conv3d");
TVM_REGISTER_GLOBAL("relay.op.nn._make.conv2d_transpose")
.set_body_typed([](Expr data, Expr weight, Array<IndexExpr> strides, Array<IndexExpr> padding,
Array<IndexExpr> dilation, int groups, IndexExpr channels,
- Array<IndexExpr> kernel_size, std::string data_layout,
- std::string kernel_layout, std::string out_layout,
- Array<IndexExpr> output_padding, DataType out_dtype) {
+ Array<IndexExpr> kernel_size, String data_layout, String kernel_layout,
+ String out_layout, Array<IndexExpr> output_padding, DataType out_dtype) {
return MakeConvTranspose<Conv2DTransposeAttrs>(
data, weight, strides, padding, dilation, groups, channels, kernel_size, data_layout,
kernel_layout, out_layout, output_padding, out_dtype, "nn.conv2d_transpose");
TVM_REGISTER_GLOBAL("relay.op.nn._make.conv1d_transpose")
.set_body_typed([](Expr data, Expr weight, Array<IndexExpr> strides, Array<IndexExpr> padding,
Array<IndexExpr> dilation, int groups, IndexExpr channels,
- Array<IndexExpr> kernel_size, std::string data_layout,
- std::string kernel_layout, std::string out_layout,
- Array<IndexExpr> output_padding, DataType out_dtype) {
+ Array<IndexExpr> kernel_size, String data_layout, String kernel_layout,
+ String out_layout, Array<IndexExpr> output_padding, DataType out_dtype) {
return MakeConvTranspose<Conv1DTransposeAttrs>(
data, weight, strides, padding, dilation, groups, channels, kernel_size, data_layout,
kernel_layout, out_layout, output_padding, out_dtype, "nn.conv1d_transpose");
TVM_REGISTER_GLOBAL("relay.op.nn._make.contrib_conv2d_winograd_without_weight_transform")
.set_body_typed([](Expr data, Expr weight, int tile_size, Array<IndexExpr> strides,
Array<IndexExpr> padding, Array<IndexExpr> dilation, int groups,
- IndexExpr channels, Array<IndexExpr> kernel_size, std::string data_layout,
- std::string kernel_layout, std::string out_layout, DataType out_dtype) {
+ IndexExpr channels, Array<IndexExpr> kernel_size, String data_layout,
+ String kernel_layout, String out_layout, DataType out_dtype) {
return MakeConvWinograd<Conv2DWinogradAttrs>(
data, weight, tile_size, strides, padding, dilation, groups, channels, kernel_size,
data_layout, kernel_layout, out_layout, out_dtype,
TVM_REGISTER_GLOBAL("relay.op.nn._make.contrib_conv3d_winograd_without_weight_transform")
.set_body_typed([](Expr data, Expr weight, int tile_size, Array<IndexExpr> strides,
Array<IndexExpr> padding, Array<IndexExpr> dilation, int groups,
- IndexExpr channels, Array<IndexExpr> kernel_size, std::string data_layout,
- std::string kernel_layout, std::string out_layout, DataType out_dtype) {
+ IndexExpr channels, Array<IndexExpr> kernel_size, String data_layout,
+ String kernel_layout, String out_layout, DataType out_dtype) {
return MakeConvWinograd<Conv3DWinogradAttrs>(
data, weight, tile_size, strides, padding, dilation, groups, channels, kernel_size,
data_layout, kernel_layout, out_layout, out_dtype,
TVM_REGISTER_GLOBAL("relay.op.nn._make.contrib_conv2d_NCHWc")
.set_body_typed([](Expr data, Expr weight, Array<IndexExpr> strides, Array<IndexExpr> padding,
Array<IndexExpr> dilation, int groups, IndexExpr channels,
- Array<IndexExpr> kernel_size, std::string data_layout,
- std::string kernel_layout, std::string out_layout, DataType out_dtype) {
+ Array<IndexExpr> kernel_size, String data_layout, String kernel_layout,
+ String out_layout, DataType out_dtype) {
return MakeConv<Conv2DAttrs>(data, weight, strides, padding, dilation, groups, channels,
kernel_size, data_layout, kernel_layout, out_layout, out_dtype,
"nn.contrib_conv2d_NCHWc");
TVM_REGISTER_GLOBAL("relay.op.nn._make.contrib_depthwise_conv2d_NCHWc")
.set_body_typed([](Expr data, Expr weight, Array<IndexExpr> strides, Array<IndexExpr> padding,
Array<IndexExpr> dilation, int groups, IndexExpr channels,
- Array<IndexExpr> kernel_size, std::string data_layout,
- std::string kernel_layout, std::string out_layout, DataType out_dtype) {
+ Array<IndexExpr> kernel_size, String data_layout, String kernel_layout,
+ String out_layout, DataType out_dtype) {
return MakeConv<Conv2DAttrs>(data, weight, strides, padding, dilation, groups, channels,
kernel_size, data_layout, kernel_layout, out_layout, out_dtype,
"nn.contrib_depthwise_conv2d_NCHWc");
TVM_REGISTER_GLOBAL("relay.op.nn._make.deformable_conv2d")
.set_body_typed([](Expr data, Expr offset, Expr weight, Array<IndexExpr> strides,
Array<IndexExpr> padding, Array<IndexExpr> dilation, int deformable_groups,
- int groups, int channels, Array<IndexExpr> kernel_size,
- std::string data_layout, std::string kernel_layout, std::string out_layout,
- DataType out_dtype) {
+ int groups, int channels, Array<IndexExpr> kernel_size, String data_layout,
+ String kernel_layout, String out_layout, DataType out_dtype) {
return MakeDeformableConv<DeformableConv2DAttrs>(
data, offset, weight, strides, padding, dilation, deformable_groups, groups, channels,
kernel_size, data_layout, kernel_layout, out_layout, out_dtype, "nn.deformable_conv2d");
// Positional relay function to create DepthToSpace operator
// used by frontend FFI
-Expr MakeDepthToSpace(Expr data, int block_size, std::string layout, std::string mode) {
+Expr MakeDepthToSpace(Expr data, int block_size, String layout, String mode) {
auto attrs = make_object<SubPixelAttrs>();
attrs->block_size = block_size;
attrs->layout = std::move(layout);
// Positional relay function to create SpaceToDepth operator
// used by frontend FFI
-Expr MakeSpaceToDepth(Expr data, int block_size, std::string layout) {
+Expr MakeSpaceToDepth(Expr data, int block_size, String layout) {
auto attrs = make_object<SubPixelAttrs>();
attrs->block_size = block_size;
attrs->layout = std::move(layout);
}
// Handler to create a call to the padding op used by front-end FFI
-Expr MakePad(Expr data, Array<Array<IndexExpr>> pad_width, double pad_value, std::string pad_mode) {
+Expr MakePad(Expr data, Array<Array<IndexExpr>> pad_width, double pad_value, String pad_mode) {
auto attrs = make_object<PadAttrs>();
attrs->pad_value = pad_value;
attrs->pad_width = std::move(pad_width);
}
// Handler to create a call to the padding op used by front-end FFI
-Expr MakeMirrorPad(Expr data, Array<Array<IndexExpr>> pad_width, std::string mode) {
+Expr MakeMirrorPad(Expr data, Array<Array<IndexExpr>> pad_width, String mode) {
auto attrs = make_object<MirrorPadAttrs>();
attrs->mode = mode;
attrs->pad_width = std::move(pad_width);
template <typename T>
Expr MakeMaxPool(Expr data, Array<IndexExpr> pool_size, Array<IndexExpr> strides,
- Array<IndexExpr> padding, std::string layout, bool ceil_mode,
- std::string op_name) {
+ Array<IndexExpr> padding, String layout, bool ceil_mode, String op_name) {
auto attrs = make_object<T>();
attrs->pool_size = std::move(pool_size);
attrs->strides = std::move(strides);
template <typename T>
Expr MakeAvgPool(Expr data, Array<IndexExpr> pool_size, Array<IndexExpr> strides,
- Array<IndexExpr> padding, std::string layout, bool ceil_mode,
- bool count_include_pad, std::string op_name) {
+ Array<IndexExpr> padding, String layout, bool ceil_mode, bool count_include_pad,
+ String op_name) {
auto attrs = make_object<T>();
attrs->pool_size = std::move(pool_size);
attrs->strides = std::move(strides);
TVM_REGISTER_GLOBAL("relay.op.nn._make.max_pool2d")
.set_body_typed([](Expr data, Array<IndexExpr> pool_size, Array<IndexExpr> strides,
- Array<IndexExpr> padding, std::string layout, bool ceil_mode) {
+ Array<IndexExpr> padding, String layout, bool ceil_mode) {
return MakeMaxPool<MaxPool2DAttrs>(data, pool_size, strides, padding, layout, ceil_mode,
"nn.max_pool2d");
});
// AvgPool2D
TVM_REGISTER_GLOBAL("relay.op.nn._make.avg_pool2d")
.set_body_typed([](Expr data, Array<IndexExpr> pool_size, Array<IndexExpr> strides,
- Array<IndexExpr> padding, std::string layout, bool ceil_mode,
+ Array<IndexExpr> padding, String layout, bool ceil_mode,
bool count_include_pad) {
return MakeAvgPool<AvgPool2DAttrs>(data, pool_size, strides, padding, layout, ceil_mode,
count_include_pad, "nn.avg_pool2d");
return Array<te::Tensor>{topi::nn::global_pool(inputs[0], mode, layout.name())};
}
-Expr MakeGlobalAvgPool2D(Expr data, std::string layout) {
+Expr MakeGlobalAvgPool2D(Expr data, String layout) {
auto attrs = make_object<GlobalPool2DAttrs>();
attrs->layout = std::move(layout);
static const Op& op = Op::Get("nn.global_avg_pool2d");
.set_attr<FTVMCompute>("FTVMCompute", GlobalPool2DCompute<topi::nn::kAvgPool>);
// GlobalMaxPool
-Expr MakeGlobalMaxPool2D(Expr data, std::string layout) {
+Expr MakeGlobalMaxPool2D(Expr data, String layout) {
auto attrs = make_object<GlobalPool2DAttrs>();
attrs->layout = std::move(layout);
static const Op& op = Op::Get("nn.global_max_pool2d");
}
// relay.nn.adaptive_avg_pool2d
-Expr MakeAdaptiveAvgPool2D(Expr data, Array<IndexExpr> output_size, std::string layout) {
+Expr MakeAdaptiveAvgPool2D(Expr data, Array<IndexExpr> output_size, String layout) {
auto attrs = make_object<AdaptivePool2DAttrs>();
attrs->output_size = std::move(output_size);
attrs->layout = std::move(layout);
.set_attr<FTVMCompute>("FTVMCompute", AdaptivePool2DCompute<topi::nn::kAvgPool>);
// relay.nn.adaptive_max_pool2d
-Expr MakeAdaptiveMaxPool2D(Expr data, Array<IndexExpr> output_size, std::string layout) {
+Expr MakeAdaptiveMaxPool2D(Expr data, Array<IndexExpr> output_size, String layout) {
auto attrs = make_object<AdaptivePool2DAttrs>();
attrs->output_size = std::move(output_size);
attrs->layout = std::move(layout);
}
// relay.nn.adaptive_max_pool3d
-Expr MakeAdaptiveMaxPool3D(Expr data, Array<IndexExpr> output_size, std::string layout) {
+Expr MakeAdaptiveMaxPool3D(Expr data, Array<IndexExpr> output_size, String layout) {
auto attrs = make_object<AdaptivePool3DAttrs>();
attrs->output_size = std::move(output_size);
attrs->layout = std::move(layout);
.set_attr<FTVMCompute>("FTVMCompute", AdaptivePool3DCompute<topi::nn::kMaxPool>);
// relay.nn.adaptive_avg_pool3d
-Expr MakeAdaptiveAvgPool3D(Expr data, Array<IndexExpr> output_size, std::string layout) {
+Expr MakeAdaptiveAvgPool3D(Expr data, Array<IndexExpr> output_size, String layout) {
auto attrs = make_object<AdaptivePool3DAttrs>();
attrs->output_size = std::move(output_size);
attrs->layout = std::move(layout);
// MaxPool2DGrad
Expr MakeMaxPool2DGrad(Expr out_grad, Expr data, Array<IndexExpr> pool_size,
- Array<IndexExpr> strides, Array<IndexExpr> padding, std::string layout,
+ Array<IndexExpr> strides, Array<IndexExpr> padding, String layout,
bool ceil_mode) {
auto attrs = make_object<MaxPool2DAttrs>();
attrs->pool_size = std::move(pool_size);
// AvgPool2DGrad
Expr MakeAvgPool2DGrad(Expr out_grad, Expr data, Array<IndexExpr> pool_size,
- Array<IndexExpr> strides, Array<IndexExpr> padding, std::string layout,
+ Array<IndexExpr> strides, Array<IndexExpr> padding, String layout,
bool ceil_mode, bool count_include_pad) {
auto attrs = make_object<AvgPool2DAttrs>();
attrs->pool_size = std::move(pool_size);
TVM_REGISTER_GLOBAL("relay.op.nn._make.max_pool1d")
.set_body_typed([](Expr data, Array<IndexExpr> pool_size, Array<IndexExpr> strides,
- Array<IndexExpr> padding, std::string layout, bool ceil_mode) {
+ Array<IndexExpr> padding, String layout, bool ceil_mode) {
return MakeMaxPool<MaxPool1DAttrs>(data, pool_size, strides, padding, layout, ceil_mode,
"nn.max_pool1d");
});
// AvgPool1D
TVM_REGISTER_GLOBAL("relay.op.nn._make.avg_pool1d")
.set_body_typed([](Expr data, Array<IndexExpr> pool_size, Array<IndexExpr> strides,
- Array<IndexExpr> padding, std::string layout, bool ceil_mode,
+ Array<IndexExpr> padding, String layout, bool ceil_mode,
bool count_include_pad) {
return MakeAvgPool<AvgPool1DAttrs>(data, pool_size, strides, padding, layout, ceil_mode,
count_include_pad, "nn.avg_pool1d");
TVM_REGISTER_GLOBAL("relay.op.nn._make.max_pool3d")
.set_body_typed([](Expr data, Array<IndexExpr> pool_size, Array<IndexExpr> strides,
- Array<IndexExpr> padding, std::string layout, bool ceil_mode) {
+ Array<IndexExpr> padding, String layout, bool ceil_mode) {
return MakeMaxPool<MaxPool3DAttrs>(data, pool_size, strides, padding, layout, ceil_mode,
"nn.max_pool3d");
});
// AvgPool3D
TVM_REGISTER_GLOBAL("relay.op.nn._make.avg_pool3d")
.set_body_typed([](Expr data, Array<IndexExpr> pool_size, Array<IndexExpr> strides,
- Array<IndexExpr> padding, std::string layout, bool ceil_mode,
+ Array<IndexExpr> padding, String layout, bool ceil_mode,
bool count_include_pad) {
return MakeAvgPool<AvgPool3DAttrs>(data, pool_size, strides, padding, layout, ceil_mode,
count_include_pad, "nn.avg_pool3d");
// Positional relay function to create upsampling operator
// used by frontend FFI.
-Expr MakeUpSampling(Expr data, double scale_h, double scale_w, std::string layout,
- std::string method, bool align_corners) {
+Expr MakeUpSampling(Expr data, double scale_h, double scale_w, String layout, String method,
+ bool align_corners) {
auto attrs = make_object<UpSamplingAttrs>();
attrs->layout = std::move(layout);
attrs->method = std::move(method);
// Positional relay function to create upsampling3d operator
// used by frontend FFI.
-Expr MakeUpSampling3D(Expr data, double scale_d, double scale_h, double scale_w, std::string layout,
- std::string method, std::string coordinate_transformation_mode) {
+Expr MakeUpSampling3D(Expr data, double scale_d, double scale_h, double scale_w, String layout,
+ String method, String coordinate_transformation_mode) {
auto attrs = make_object<UpSampling3DAttrs>();
attrs->layout = std::move(layout);
attrs->method = std::move(method);
}
}
-Expr MakeTake(Expr data, Expr indices, Integer axis, std::string mode) {
+Expr MakeTake(Expr data, Expr indices, Integer axis, String mode) {
auto attrs = make_object<TakeAttrs>();
attrs->axis = std::move(axis);
attrs->mode = std::move(mode);
return true;
}
-Expr MakeLayoutTransform(Expr data, std::string src_layout, std::string dst_layout) {
+Expr MakeLayoutTransform(Expr data, String src_layout, String dst_layout) {
auto attrs = make_object<LayoutTransformAttrs>();
attrs->src_layout = std::move(src_layout);
attrs->dst_layout = std::move(dst_layout);
}
Expr MakeROIAlign(Expr data, Expr rois, Array<IndexExpr> pooled_size, double spatial_scale,
- int sample_ratio, std::string layout) {
+ int sample_ratio, String layout) {
auto attrs = make_object<ROIAlignAttrs>();
attrs->pooled_size = pooled_size;
attrs->spatial_scale = spatial_scale;
}
Expr MakeROIPool(Expr data, Expr rois, Array<IndexExpr> pooled_size, double spatial_scale,
- std::string layout) {
+ String layout) {
auto attrs = make_object<ROIPoolAttrs>();
attrs->pooled_size = pooled_size;
attrs->spatial_scale = spatial_scale;
Expr MakeQnnConv2D(Expr data, Expr weight, Expr input_zero_point, Expr kernel_zero_point,
Expr input_scale, Expr kernel_scale, Array<IndexExpr> strides,
Array<IndexExpr> padding, Array<IndexExpr> dilation, int groups,
- IndexExpr channels, Array<IndexExpr> kernel_size, std::string data_layout,
- std::string kernel_layout, std::string out_layout, DataType out_dtype) {
+ IndexExpr channels, Array<IndexExpr> kernel_size, String data_layout,
+ String kernel_layout, String out_layout, DataType out_dtype) {
auto attrs = make_object<Conv2DAttrs>();
attrs->strides = std::move(strides);
attrs->padding = std::move(padding);
// Positional relay function to create qnn requantize operator
// used by frontend FFI.
Expr MakeRequantize(Expr data, Expr input_scale, Expr input_zero_point, Expr output_scale,
- Expr output_zero_point, int axis, std::string rounding, DataType out_dtype) {
+ Expr output_zero_point, int axis, String rounding, DataType out_dtype) {
auto attrs = make_object<RequantizeAttrs>();
attrs->axis = axis;
attrs->rounding = std::move(rounding);
TVM_REGISTER_GLOBAL("relay._quantize.simulated_quantize")
.set_body_typed([](Expr data, Expr dom_scale, Expr clip_min, Expr clip_max, int kind, bool sign,
- std::string rounding) {
+ String rounding) {
auto attrs = make_object<SimulatedQuantizeAttrs>();
attrs->kind = kind;
attrs->sign = sign;
namespace transform {
-Pass CombineParallelOpBatch(const std::string& op_name, const std::string& batch_op_name,
+Pass CombineParallelOpBatch(const String& op_name, const String& batch_op_name,
uint64_t min_num_branches) {
runtime::TypedPackedFunc<Function(Function, IRModule, PassContext)> pass_func =
[=](Function f, IRModule m, PassContext pc) {
}
};
-Expr ForwardRewrite(const Expr& expr, const std::string& rewrite_map_name,
+Expr ForwardRewrite(const Expr& expr, const String& rewrite_map_name,
std::function<ObjectRef(const Call&)> fcontext,
std::function<Expr(const Expr&)> fmulti_ref_trigger) {
auto rewrite_map = Op::GetAttrMap<FForwardRewrite>(rewrite_map_name);
namespace transform {
-Pass Legalize(const std::string& legalize_map_attr_name) {
+Pass Legalize(const String& legalize_map_attr_name) {
runtime::TypedPackedFunc<Function(Function, IRModule, PassContext)> pass_func =
[=](Function f, IRModule m, PassContext pc) {
return Downcast<Function>(relay::legalize::Legalize(f, legalize_map_attr_name));
Expr MakeExpandDims(Expr data, int axis, int num_newaxis);
-Expr MakeLayoutTransform(Expr data, std::string src_layout, std::string dst_layout);
+Expr MakeLayoutTransform(Expr data, String src_layout, String dst_layout);
Expr StopFusion(Expr data);
Expr Atomic(const Expr& e, const Var& v) { return v.defined() ? GetScope(e)->ll->Push(v, e) : e; }
Expr Compound(const Expr& orig, const Expr& now, const Var& v) {
- Var var = v.defined() ? v : Var(std::string("x"), Type());
+ Var var = v.defined() ? v : Var(String("x"), Type());
return GetScope(orig)->ll->Push(var, now);
}
assert isinstance(tvar, tvm.ir.GlobalTypeVar)
assert tvar.name_hint == "in0"
+def test_var():
+ # Regression test: a relay.Var whose relay.Id "name_hint" was
+ # serialized by TVM 0.6 as a plain std::string attribute must still
+ # load after the runtime::String migration (node 2 below is the Id).
+ nodes = [
+ {"type_key": ""},
+ {"type_key": "relay.Var",
+ "attrs": {
+ "_checked_type_": "0",
+ "span": "0",
+ "type_annotation": "0",
+ "vid": "2"
+ }
+ },
+ {"type_key": "relay.Id",
+ "attrs": {"name_hint": "a3"}},
+ {"type_key": "relay.TensorType",
+ "attrs": {
+ "dtype": "float32",
+ "shape": "4",
+ "span": "0"
+ }
+ },
+ {"type_key": "Array",
+ "data": [5, 6]
+ },
+ {"type_key": "IntImm",
+ "attrs": {"dtype": "int32", "value": "16"}},
+ {"type_key": "IntImm",
+ "attrs": {"dtype": "int32", "value": "8"}}
+ ]
+ data = {
+ "root" : 1,
+ "nodes": nodes,
+ "attrs": {"tvm_version": "0.6.0"},
+ "b64ndarrays": [],
+ }
+ # load_json runs the 0.6 upgrade map, which rewrites the Id's
+ # std::string "name_hint" into a runtime::String node.
+ tvar = tvm.ir.load_json(json.dumps(data))
+ assert isinstance(tvar, relay.Var)
+ assert tvar.name_hint == "a3"
def test_incomplete_type():
nodes = [
if __name__ == "__main__":
test_op()
test_type_var()
+ test_var()
test_incomplete_type()
test_func_tuple_type()
test_global_var()