*/
/*!
- * \file error.h
- * \brief The set of errors raised by Relay.
+ * \file tvm/ir/error.h
+ * \brief Utilities for error tracking and reporting.
*/
-#ifndef TVM_RELAY_ERROR_H_
-#define TVM_RELAY_ERROR_H_
+#ifndef TVM_IR_ERROR_H_
+#define TVM_IR_ERROR_H_
+#include <tvm/ir/span.h>
#include <tvm/ir/module.h>
#include <string>
#include <sstream>
#include <unordered_map>
-#include "./base.h"
-#include "./expr.h"
-
-
namespace tvm {
-namespace relay {
-
-#define RELAY_ERROR(msg) (RelayErrorStream() << msg)
-
-// Forward declaratio for RelayErrorStream.
-struct Error;
-
-/*! \brief A wrapper around std::stringstream.
+/*!
+ * \brief A wrapper around std::stringstream to build error.
*
- * This is designed to avoid platform specific
- * issues compiling and using std::stringstream
- * for error reporting.
+ * Can be consumed by Error to construct an error.
+ *
+ * \code
+ *
+ * void ReportError(const Error& err);
+ *
+ * void Test(int number) {
+ * // Use error reporter to construct an error.
+ * ReportError(ErrorBuilder() << "This is an error number=" << number);
+ * }
+ *
+ * \endcode
*/
-struct RelayErrorStream {
- std::stringstream ss;
-
+struct ErrorBuilder {
+ public:
template<typename T>
- RelayErrorStream& operator<<(const T& t) {
- ss << t;
+ ErrorBuilder& operator<<(const T& val) { // NOLINT(*)
+ stream_ << val;
return *this;
}
- std::string str() const {
- return ss.str();
- }
-
- void Raise() const;
+ private:
+ std::stringstream stream_;
+ friend class Error;
};
-struct Error : public dmlc::Error {
- Span sp;
- explicit Error(const std::string& msg) : dmlc::Error(msg), sp(nullptr) {}
- Error(const RelayErrorStream& msg) : dmlc::Error(msg.str()), sp(nullptr) {} // NOLINT(*)
- Error(const Error& err) : dmlc::Error(err.what()), sp(nullptr) {}
- Error() : dmlc::Error(""), sp(nullptr) {}
+/*!
+ * \brief Custom Error class to be thrown during compilation.
+ */
+class Error : public dmlc::Error {
+ public:
+ /*! \brief Location of the error */
+ Span span;
+ /*!
+ * \brief construct error from message.
+ * \param msg The message
+ */
+ explicit Error(const std::string& msg) : dmlc::Error(msg), span(nullptr) {}
+ /*!
+ * \brief construct error from error builder.
+ * \param err The error builder
+ */
+ Error(const ErrorBuilder& err) : dmlc::Error(err.stream_.str()), span(nullptr) {} // NOLINT(*)
+ /*!
+ * \brief copy constructor.
+   * \param other The other error.
+ */
+ Error(const Error& other) : dmlc::Error(other.what()), span(other.span) {} // NOLINT(*)
+ /*!
+ * \brief default constructor. */
+ Error() : dmlc::Error(""), span(nullptr) {}
};
-/*! \brief An abstraction around how errors are stored and reported.
+/*!
+ * \brief An abstraction around how errors are stored and reported.
* Designed to be opaque to users, so we can support a robust and simpler
* error reporting mode, as well as a more complex mode.
*
*/
class ErrorReporter {
public:
+ /*! \brief default constructor. */
ErrorReporter() : errors_(), node_to_error_() {}
- /*! \brief Report a tvm::relay::Error.
+ /*!
+ * \brief Report a tvm::Error.
*
* This API is useful for reporting spanned errors.
*
* \param err The error to report.
*/
void Report(const Error& err) {
- if (!err.sp.defined()) {
+ if (!err.span.defined()) {
throw err;
}
this->errors_.push_back(err);
}
- /*! \brief Report an error against a program, using the full program
+ /*!
+ * \brief Report an error against a program, using the full program
* error reporting strategy.
*
* This error reporting method requires the global function in which
* \param node The expression or type to report the error at.
* \param err The error message to report.
*/
- inline void ReportAt(const GlobalVar& global, const ObjectRef& node, std::stringstream& err) {
+ void ReportAt(const GlobalVar& global, const ObjectRef& node, std::stringstream& err) {
std::string err_msg = err.str();
this->ReportAt(global, node, Error(err_msg));
}
- /*! \brief Report an error against a program, using the full program
+ /*!
+ * \brief Report an error against a program, using the full program
* error reporting strategy.
*
* This error reporting method requires the global function in which
*/
void ReportAt(const GlobalVar& global, const ObjectRef& node, const Error& err);
- /*! \brief Render all reported errors and exit the program.
+ /*!
+ * \brief Render all reported errors and exit the program.
*
* This function should be used after executing a pass to render reported errors.
*
std::unordered_map<ObjectRef, GlobalVar, ObjectHash, ObjectEqual> node_to_gv_;
};
-} // namespace relay
} // namespace tvm
-
-#endif // TVM_RELAY_ERROR_H_
+#endif // TVM_IR_ERROR_H_
#define TVM_RELAY_EXPR_FUNCTOR_H_
#include <tvm/node/functor.h>
+#include <tvm/ir/error.h>
+
#include <string>
#include <utility>
#include <unordered_map>
+
#include "./expr.h"
#include "./adt.h"
#include "./op.h"
-#include "./error.h"
+
namespace tvm {
namespace relay {
#define TVM_RELAY_PATTERN_FUNCTOR_H_
#include <tvm/node/functor.h>
+#include <tvm/ir/error.h>
+
#include <string>
#include <utility>
#include <unordered_map>
+
#include "./expr.h"
#include "./op.h"
-#include "./error.h"
#include "./adt.h"
namespace tvm {
#include <tvm/base.h>
#include <tvm/packed_func_ext.h>
#include <tvm/relay/attrs/transform.h>
-#include <tvm/relay/error.h>
+#include <tvm/ir/error.h>
#include <tvm/relay/expr.h>
#include <tvm/ir/module.h>
#include <tvm/relay/op.h>
*/
/*!
- * \file error_reporter.h
- * \brief The set of errors raised by Relay.
+ * \file ir/error.cc
+ * \brief Utilities for error tracking and reporting.
*/
-#include <tvm/relay/expr.h>
#include <tvm/ir/module.h>
-#include <tvm/relay/error.h>
+#include <tvm/ir/error.h>
+// NOTE on the dependency on relay's AsText.
+// We call into relay's printing module for better rendering.
+// This dependency does not exist at the interface level,
+// and is only used to enhance the developer experience when relay
+// functions are presented.
+#include <tvm/relay/expr.h>
+
#include <string>
#include <vector>
#include <rang.hpp>
namespace tvm {
-namespace relay {
-
-void RelayErrorStream::Raise() const {
- throw Error(*this);
-}
template<typename T, typename U>
using NodeMap = std::unordered_map<T, U, ObjectHash, ObjectEqual>;
// First we pick an error reporting strategy for each error.
// TODO(@jroesch): Spanned errors are currently not supported.
for (auto err : this->errors_) {
- CHECK(!err.sp.defined()) << "attempting to use spanned errors, currently not supported";
+ CHECK(!err.span.defined()) << "attempting to use spanned errors, currently not supported";
}
NodeMap<GlobalVar, NodeMap<ObjectRef, std::string>> error_maps;
//
// The annotation callback will annotate the error messages
// contained in the map.
- annotated_prog << AsText(func, false, [&err_map](tvm::relay::Expr expr) {
+ annotated_prog << relay::AsText(func, false, [&err_map](tvm::relay::Expr expr) {
auto it = err_map.find(expr);
if (it != err_map.end()) {
CHECK_NE(it->second.size(), 0);
this->node_to_gv_.insert({ node, global });
}
-} // namespace relay
} // namespace tvm
*/
#include <tvm/operation.h>
-#include <tvm/relay/error.h>
+#include <tvm/ir/error.h>
#include <tvm/relay/expr_functor.h>
#include <tvm/relay/interpreter.h>
#include <tvm/relay/qnn/transform.h>
#ifndef TVM_RELAY_BACKEND_VM_COMPILER_H_
#define TVM_RELAY_BACKEND_VM_COMPILER_H_
-#include <tvm/relay/error.h>
+#include <tvm/ir/error.h>
#include <tvm/relay/expr_functor.h>
#include <tvm/relay/interpreter.h>
#include <tvm/logging.h>
* \brief Transform operators.
*/
#include <tvm/relay/op.h>
-#include <tvm/relay/error.h>
+#include <tvm/ir/error.h>
#include <tvm/relay/attrs/transform.h>
#include <tvm/expr_operator.h>
#include <tvm/ir.h>
for (size_t j = 0; j < first->shape.size(); ++j) {
if (j == static_cast<size_t>(axis)) continue;
if (reporter->AssertEQ(first->shape[j], e->shape[j])) continue;
- throw relay::Error("relay.stack requires all tensors have the same shape "
+ throw Error("relay.stack requires all tensors have the same shape "
"on non-stacking axes");
}
}
#ifndef TVM_RELAY_OP_TENSOR_TRANSFORM_H_
#define TVM_RELAY_OP_TENSOR_TRANSFORM_H_
-#include <tvm/relay/error.h>
+#include <tvm/ir/error.h>
#include <vector>
#include <algorithm>
#include <limits>
*/
const auto* tensor_tuple = types[0].as<TupleTypeNode>();
if (tensor_tuple == nullptr) {
- throw relay::Error(
- RELAY_ERROR(
- "concatenate requires a tuple of tensors as the first argument, found "
- << PrettyPrint(types[0])));
+ throw Error(
+ ErrorBuilder()
+ << "concatenate requires a tuple of tensors as the first argument, found "
+ << PrettyPrint(types[0]));
} else if (types[0].as<IncompleteTypeNode>() != nullptr) {
return false;
}
// Sanity check: axis
int axis = param->axis;
if (!(-ndim <= axis && axis < ndim)) {
- throw relay::Error(RELAY_ERROR(
+ throw Error(ErrorBuilder() <<
"concatenate only accepts `axis` in [-ndim, ndim)" <<
", but got axis = " << axis <<
- ", and ndim = " << ndim));
+ ", and ndim = " << ndim);
}
axis = axis < 0 ? ndim + axis : axis;
int e_ndim = static_cast<int>(e->shape.size());
const DataType& e_dtype = e->dtype;
if (e_ndim != ndim) {
- throw relay::Error("relay.concatenate requires all tensors have the same ndim");
+ throw Error("relay.concatenate requires all tensors have the same ndim");
}
if (e_dtype != dtype) {
- throw relay::Error("relay.concatenate requires all tensors have the same dtype");
+ throw Error("relay.concatenate requires all tensors have the same dtype");
}
for (size_t j = 0; j < first->shape.size(); ++j) {
if (j == static_cast<size_t>(axis)) continue;
if (reporter->AssertEQ(first->shape[j], e->shape[j])) continue;
- throw relay::Error("relay.concatenate requires all tensors have the same shape "
- "on non-concatenating axes");
+ throw Error("relay.concatenate requires all tensors have the same shape "
+ "on non-concatenating axes");
}
}
} else if (EqualCheck(s1, s2)) {
oshape.push_back(s1);
} else {
- RELAY_ERROR(
- "Incompatible broadcast type "
- << t1 << " and " << t2).Raise();
+ throw Error(ErrorBuilder()
+ << "Incompatible broadcast type "
+ << t1 << " and " << t2);
}
}
#ifndef TVM_RELAY_OP_TYPE_RELATIONS_H_
#define TVM_RELAY_OP_TYPE_RELATIONS_H_
-#include <tvm/relay/error.h>
+#include <tvm/ir/error.h>
#include <tvm/relay/type.h>
#include <string>
* contains a data type such as `int`, `float`, `uint`.
*/
#include <tvm/relay/analysis.h>
-#include <tvm/relay/error.h>
+#include <tvm/ir/error.h>
#include "../ir/type_functor.h"
namespace tvm {
Kind expected, const std::string& description) {
Kind k = this->VisitType(t);
if (k != expected) {
- ReportFatalError(RELAY_ERROR("Incorrect kind for a " << description
- << ". Type " << t << " inside " << outer
- << " is of kind " << k
- << " but was expected to be "
- << expected));
+ ReportFatalError(ErrorBuilder()
+ << "Incorrect kind for a " << description
+ << ". Type " << t << " inside " << outer
+ << " is of kind " << k
+ << " but was expected to be "
+ << expected);
}
}
TypeCall tc = GetRef<TypeCall>(op);
const auto* gtv = op->func.as<GlobalTypeVarNode>();
if (gtv == nullptr) {
- ReportFatalError(RELAY_ERROR("The callee in " << tc
- << " is not a global type var, but is " << op->func));
+ ReportFatalError(
+ ErrorBuilder() <<"The callee in " << tc
+ << " is not a global type var, but is " << op->func);
}
CheckKindMatches(op->func, tc, Kind::kAdtHandle, "type call function");
auto var = GetRef<GlobalTypeVar>(gtv);
auto data = mod->LookupTypeDef(var);
if (data->type_vars.size() != op->args.size()) {
- ReportFatalError(RELAY_ERROR("Expected " << data->type_vars.size() << "arguments for " << tc
- << "; got " << op->args.size()));
+ ReportFatalError(ErrorBuilder()
+ << "Expected " << data->type_vars.size() << "arguments for " << tc
+ << "; got " << op->args.size());
}
return Kind::kType;
}
for (const auto& con : op->constructors) {
if (!con->belong_to.same_as(op->header)) {
- ReportFatalError(RELAY_ERROR(con << " has header " << con->belong_to
- << " but " << op << " has header " << op->header));
+ ReportFatalError(ErrorBuilder()
+ <<con << " has header " << con->belong_to
+ << " but " << op << " has header " << op->header);
}
for (const Type& t : con->inputs) {
* dynamic error unless exhaustiveness is checked in advance.
*/
#include <tvm/relay/adt.h>
-#include <tvm/relay/error.h>
+#include <tvm/ir/error.h>
#include <tvm/relay/expr_functor.h>
#include <tvm/relay/pattern_functor.h>
#include <stack>
* constraints we will trigger an error.
*/
-#include <tvm/relay/error.h>
+#include <tvm/ir/error.h>
#include <tvm/relay/expr_functor.h>
#include <tvm/relay/pattern_functor.h>
#include <tvm/relay/analysis.h>
} catch (const dmlc::Error &e) {
this->ReportFatalError(
expr,
- RELAY_ERROR("Error unifying `"
+ ErrorBuilder()
+ << "Error unifying `"
<< t1
<< "` and `"
<< t2
- << "`: " << e.what()));
+ << "`: " << e.what());
return Type();
}
}
if (!mod_.defined()) {
this->ReportFatalError(
GetRef<GlobalVar>(op),
- RELAY_ERROR(
+ ErrorBuilder() <<
"Cannot do type inference on global variables " \
- "without a module"));
+ "without a module");
}
Expr e = mod_->Lookup(var);
return e->checked_type();
auto* tc = unified.as<TypeCallNode>();
if (!tc) {
- this->ReportFatalError(pc, RELAY_ERROR("Expected a type call, got " << unified));
+ this->ReportFatalError(pc, ErrorBuilder() << "Expected a type call, got " << unified);
}
if (td->header != tc->func) {
- this->ReportFatalError(pc, RELAY_ERROR("ADT headers must match, but we have "
- << td->header << " and " << tc->func));
+ this->ReportFatalError(pc,
+ ErrorBuilder() << "ADT headers must match, but we have "
+ << td->header << " and " << tc->func);
}
if (td->type_vars.size() != tc->args.size()) {
- this->ReportFatalError(pc, RELAY_ERROR("The number of type args must match"
- << "the number of type vars in the type data: "
- << td->type_vars.size() << " != " << tc->args.size()));
+ this->ReportFatalError(pc,
+ ErrorBuilder() << "The number of type args must match"
+ << "the number of type vars in the type data: "
+ << td->type_vars.size() << " != " << tc->args.size());
}
std::unordered_map<TypeVar, Type, ObjectHash, ObjectEqual> type_var_map_;
for (size_t i = 0; i < td->type_vars.size(); ++i) {
}
CHECK(con->constructor->inputs.size() == con->patterns.size()) << "not enough pattern";
if (con->constructor->inputs.size() != con->patterns.size()) {
- this->ReportFatalError(pc, RELAY_ERROR("Not enough inputs for the constructor; "
- << "expected " << con->constructor->inputs.size()
- << ", got " << con->patterns.size()));
+ this->ReportFatalError(pc,
+ ErrorBuilder() << "Not enough inputs for the constructor; "
+ << "expected " << con->constructor->inputs.size()
+ << ", got " << con->patterns.size());
}
for (size_t i = 0; i < con->constructor->inputs.size(); ++i) {
VisitPattern(con->patterns[i], Bind(con->constructor->inputs[i], type_var_map_));
auto* tt = unified.as<TupleTypeNode>();
if (!tt) {
- this->ReportFatalError(pt, RELAY_ERROR("Expected a tuple type, got " << unified));
+ this->ReportFatalError(pt, ErrorBuilder() << "Expected a tuple type, got " << unified);
}
CHECK(tup->patterns.size() == tt->fields.size()) << "not enough pattern";
for (size_t i = 0; i < tup->patterns.size(); ++i) {
Match match = GetRef<Match>(op);
Array<Pattern> unmatched_cases = UnmatchedCases(match, this->mod_);
if (unmatched_cases.size() != 0) {
- RelayErrorStream ss;
+ ErrorBuilder ss;
ss << "match expression does not handle the following cases: ";
int i = 0;
for (auto cs : unmatched_cases) {
if (fn_ty_node == nullptr && inc_ty_node == nullptr) {
this->ReportFatalError(
GetRef<Call>(call),
- RELAY_ERROR("only expressions with function types can be called, found "
- << ftype));
+ ErrorBuilder()
+ << "only expressions with function types can be called, found "
+ << ftype);
}
// incomplete type => it must be a function taking the arg types
Array<Type> type_args = call->type_args;
if (type_args.size() > fn_ty_node->type_params.size()) {
this->ReportFatalError(GetRef<Call>(call),
- RELAY_ERROR("Incorrect number of type args in "
+ ErrorBuilder()
+ << "Incorrect number of type args in "
<< call->span << ": "
<< "Expected "
<< fn_ty_node->type_params.size()
- << "but got " << type_args.size()));
+ << "but got " << type_args.size());
}
FuncType fn_ty = InstantiateFuncType(fn_ty_node, type_args);
if (type_arity < number_of_args) {
this->ReportFatalError(
GetRef<Call>(call),
- RELAY_ERROR("the function is provided too many arguments "
- << "expected " << type_arity << ", found " << number_of_args));
+ ErrorBuilder()
+ << "the function is provided too many arguments "
+ << "expected " << type_arity << ", found " << number_of_args);
} else {
this->ReportFatalError(
GetRef<Call>(call),
- RELAY_ERROR("the function is provided too few arguments "
- << "expected " << type_arity << ", found " << number_of_args));
+ ErrorBuilder()
+ << "the function is provided too few arguments "
+ << "expected " << type_arity << ", found " << number_of_args);
}
}
} else {
Type resolved = this->VisitType(lhs->resolved_type, rhs->resolved_type);
if (!resolved.defined()) {
- solver_->ReportError(RELAY_ERROR("unable to unify: "
- << "`" << PrettyPrint(lhs->resolved_type) << "` and `"
- << PrettyPrint(rhs->resolved_type) << "`"),
- this->loc);
+ solver_->ReportError(
+ ErrorBuilder() << "unable to unify: "
+ << "`" << PrettyPrint(lhs->resolved_type) << "` and `"
+ << PrettyPrint(rhs->resolved_type) << "`",
+ this->loc);
return lhs->resolved_type;
} else {
TypeNode* top = solver_->GetTypeNode(resolved);
tvm::Array<IndexExpr> shape;
if (tt1->shape.size() != tt2->shape.size()) {
this->solver_->ReportError(
- RELAY_ERROR(
+ ErrorBuilder() <<
"tensor type `" << PrettyPrint(tt1) <<
"` has " << tt1->shape.size() <<
" dimensions, while `" <<
PrettyPrint(tt2) <<
"` has " << tt2->shape.size() <<
- " dimensions"), this->loc);
+ " dimensions", this->loc);
return Type(nullptr);
}
}
if (mismatches.size() != 0) {
- RelayErrorStream err;
+ ErrorBuilder err;
err << "in particular ";
for (auto mismatch : mismatches) {
err << "dimension "
rnode->resolved = false;
} catch (const dmlc::Error& err) {
rnode->resolved = false;
- this->ReportError(RELAY_ERROR("an internal invariant was violated while "
- "typechecking your program "
- << err.what()),
- rnode->location);
+ this->ReportError(
+ ErrorBuilder() << "an internal invariant was violated while "
+ << "typechecking your program "
+ << err.what(),
+ rnode->location);
}
// Mark inqueue as false after the function call
#include <tvm/relay/expr.h>
#include <tvm/relay/type.h>
#include <tvm/relay/analysis.h>
-#include <tvm/relay/error.h>
+#include <tvm/ir/error.h>
#include <vector>
#include <queue>
#include <unordered_map>
// Check the scale and zero point types
const auto* input_scales_tuple = types[1].as<TupleTypeNode>();
if (input_scales_tuple == nullptr) {
- throw relay::Error(
- RELAY_ERROR("qnn concatenate requires a tuple of scales as the second argument, found "
- << PrettyPrint(types[1])));
+ throw Error(
+ ErrorBuilder()
+ << "qnn concatenate requires a tuple of scales as the second argument, found "
+ << PrettyPrint(types[1]));
}
for (const auto& input_scale : input_scales_tuple->fields) {
CHECK(IsScalarType(input_scale, DataType::Float(32))); // input_scales[idx]
const auto* input_zero_points_tuple = types[2].as<TupleTypeNode>();
if (input_zero_points_tuple == nullptr) {
- throw relay::Error(
- RELAY_ERROR("qnn concatenate requires a tuple of zero_points as the third argument, found "
- << PrettyPrint(types[2])));
+ throw Error(
+ ErrorBuilder()
+ << "qnn concatenate requires a tuple of zero_points as the third argument, found "
+ << PrettyPrint(types[2]));
}
for (const auto& input_zero_point : input_zero_points_tuple->fields) {
CHECK(IsScalarType(input_zero_point, DataType::Int(32))); // input_zero_points[idx]