1 change: 0 additions & 1 deletion paddle/cinn/hlir/dialect/operator/ir/manual_op.cc
@@ -65,7 +65,6 @@ void GroupOp::Build(pir::Builder& builder, // NOLINT
std::unique_ptr<pir::Block>&& block) {
VLOG(4) << "Start build GroupOp";
if (block && !block->empty()) {
// IR_ENFORCE(block->back().isa<pir::YieldOp>());
PADDLE_ENFORCE_EQ(block->back().isa<pir::YieldOp>(), true);
auto& op = block->back();
for (size_t i = 0; i < op.num_operands(); ++i) {
44 changes: 1 addition & 43 deletions paddle/common/enforce.h
@@ -362,47 +362,5 @@ inline bool is_error(const T& stat) {
}

namespace pir {
class IrNotMetException : public std::exception {
public:
explicit IrNotMetException(const std::string& str)
: err_str_(str + ::common::enforce::GetCurrentTraceBackString()) {}

const char* what() const noexcept override { return err_str_.c_str(); }

private:
std::string err_str_;
::common::enforce::details::PaddleFatalGuard paddle_fatal_guard_;
};

#define IR_THROW(...) \
do { \
try { \
throw pir::IrNotMetException( \
paddle::string::Sprintf("Error occurred at: %s:%d :\n%s", \
__FILE__, \
__LINE__, \
paddle::string::Sprintf(__VA_ARGS__))); \
} catch (const std::exception& e) { \
std::cout << e.what() << std::endl; \
throw; \
} \
} while (0)

#define IR_ENFORCE(COND, ...) \
do { \
bool __cond__(COND); \
if (UNLIKELY(is_error(__cond__))) { \
try { \
throw pir::IrNotMetException( \
paddle::string::Sprintf("Error occurred at: %s:%d :\n%s", \
__FILE__, \
__LINE__, \
paddle::string::Sprintf(__VA_ARGS__))); \
} catch (const std::exception& e) { \
std::cout << e.what() << std::endl; \
throw; \
} \
} \
} while (0)

#define IR_THROW(...) PADDLE_THROW(phi::errors::Fatal(__VA_ARGS__))
} // namespace pir
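
With `pir::IrNotMetException`, `IR_ENFORCE`, and the old `IR_THROW` removed from `paddle/common/enforce.h` (and `IR_THROW` redefined on top of `PADDLE_THROW(phi::errors::Fatal(...))`), call sites follow the pattern sketched below. This is a minimal illustrative sketch, not code from this PR: `CheckIsBool`, its argument, and the `#include` choice are assumptions; only the macro and error-type usage mirror the diffs that follow.

```cpp
// Minimal sketch of the call-site migration applied in the files below.
// `CheckIsBool` and its argument are illustrative placeholders, not code
// from this PR; only the macro/error-type usage mirrors the actual changes.
#include "paddle/phi/core/enforce.h"  // PADDLE_ENFORCE_EQ, phi::errors (assumed header)

void CheckIsBool(bool is_bool_dtype) {
  // Old form, deleted from paddle/common/enforce.h by this PR; it threw
  // pir::IrNotMetException:
  //   IR_ENFORCE(is_bool_dtype, "Expected a bool DenseTensorType.");
  //
  // New form: compare the condition against `true` and wrap the message in a
  // phi::errors type, so a failure raises common::enforce::EnforceNotMet,
  // which is also what translator/utils.cc now catches.
  PADDLE_ENFORCE_EQ(
      is_bool_dtype,
      true,
      phi::errors::InvalidArgument("Expected a bool DenseTensorType."));
}
```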
2 changes: 1 addition & 1 deletion paddle/fluid/ir_adaptor/translator/utils.cc
@@ -95,7 +95,7 @@ std::vector<std::string> CheckUnregisteredOperationInBlock(
OpTranscriber general_handler;
try {
general_handler.LookUpOpInfo(ctx, *op);
} catch (pir::IrNotMetException& e) {
} catch (common::enforce::EnforceNotMet& e) {
unregistered_ops.push_back(op->Type());
}
}
@@ -159,13 +159,19 @@ inline ShapeOrData SliceRawInferSymbolicShape(
// Currently, we DO NOT support the case that any element in `axes` `starts`
// or `ends` is a Symbol.
auto vec_int64 = details::VecExpr2Int64(starts);
IR_ENFORCE(vec_int64.has_value(),
"for slice op, all the elements in `starts` must be int64_t");
PADDLE_ENFORCE_EQ(
vec_int64.has_value(),
true,
phi::errors::InvalidArgument(
"for slice op, all the elements in `starts` must be int64_t"));
std::vector<int64_t> starts_int = vec_int64.value();

vec_int64 = details::VecExpr2Int64(ends);
IR_ENFORCE(vec_int64.has_value(),
"for slice op, all the elements in `ends` must be int64_t");
PADDLE_ENFORCE_EQ(
vec_int64.has_value(),
true,
phi::errors::InvalidArgument(
"for slice op, all the elements in `ends` must be int64_t"));
std::vector<int64_t> ends_int = vec_int64.value();

const int64_t start =
@@ -909,10 +909,13 @@ bool SqueezeOpInferSymbolicShape(

std::vector<int> squeeze_dims;
for (auto squeeze_dim : squeeze_dims_sym) {
IR_ENFORCE(squeeze_dim.Has<std::int64_t>(),
"in SqueezeOpInferSymbolicShape, axes must be known int type, "
"but got: %s",
symbol::ToString(squeeze_dim));
PADDLE_ENFORCE_EQ(
squeeze_dim.Has<std::int64_t>(),
true,
phi::errors::InvalidArgument(
"in SqueezeOpInferSymbolicShape, axes must be known int type, "
"but got: %s",
symbol::ToString(squeeze_dim)));
squeeze_dims.emplace_back(
static_cast<int>(squeeze_dim.Get<std::int64_t>()));
}
@@ -1031,10 +1034,13 @@

int cur_output_rank = x_dims_size;
for (auto axis_expr : axes_sym) {
IR_ENFORCE(axis_expr.Has<std::int64_t>(),
"in UnsqueezeOpInferSymbolicShape, axes must be known int type, "
"but got: %s",
symbol::ToString(axis_expr));
PADDLE_ENFORCE_EQ(
axis_expr.Has<std::int64_t>(),
true,
phi::errors::InvalidArgument(
"in UnsqueezeOpInferSymbolicShape, axes must be known int type, "
"but got: %s",
symbol::ToString(axis_expr)));
int axis = static_cast<int>(axis_expr.Get<std::int64_t>());
int cur = axis < 0 ? axis + cur_output_rank + 1 : axis;

34 changes: 21 additions & 13 deletions paddle/fluid/pir/dialect/operator/ir/control_flow_op.cc
@@ -904,31 +904,39 @@ void AssertOp::VerifySig() {
"The size %d of inputs must be equal to 2.", input_size));

if ((*this)->operand_source(0).type().isa<pir::DenseTensorType>()) {
IR_ENFORCE((*this)
->operand_source(0)
.type()
.dyn_cast<pir::DenseTensorType>()
.dtype()
.isa<pir::BoolType>(),
"Type validation failed for the 0th input, it should be a "
"bool DenseTensorType.");
PADDLE_ENFORCE_EQ(
(*this)
->operand_source(0)
.type()
.dyn_cast<pir::DenseTensorType>()
.dtype()
.isa<pir::BoolType>(),
true,
phi::errors::InvalidArgument(
"Type validation failed for the 0th input, it should be a "
"bool DenseTensorType."));
}

if (auto vec_type =
(*this)->operand(1).type().dyn_cast<pir::VectorType>()) {
for (size_t i = 0; i < vec_type.size(); ++i) {
IR_ENFORCE(vec_type[i].isa<paddle::dialect::DenseTensorType>() ||
vec_type[i].isa<paddle::dialect::SelectedRowsType>(),
"Type validation failed for the 1th input.");
PADDLE_ENFORCE_EQ(
vec_type[i].isa<paddle::dialect::DenseTensorType>() ||
vec_type[i].isa<paddle::dialect::SelectedRowsType>(),
true,
phi::errors::InvalidArgument(
"Type validation failed for the 1th input."));
}
} else {
IR_ENFORCE(
PADDLE_ENFORCE_EQ(
(*this)->operand(1).type().isa<paddle::dialect::DenseTensorType>() ||
(*this)
->operand(1)
.type()
.isa<paddle::dialect::SelectedRowsType>(),
"Type validation failed for the 1th input.");
true,
phi::errors::InvalidArgument(
"Type validation failed for the 1th input."));
}
}
VLOG(4) << "Verifying attributes:";
37 changes: 22 additions & 15 deletions paddle/fluid/pir/dialect/operator/ir/manual_onednn_op.cc
@@ -200,26 +200,33 @@ void ExpandOp::VerifySig() {
2u,
phi::errors::InvalidArgument(
"The size %d of inputs must be equal to 2.", input_size));
IR_ENFORCE((*this)
->operand_source(0)
.type()
.isa<paddle::dialect::DenseTensorType>(),
"Type validation failed for the 0th input, got %s.",
(*this)->operand_source(0).type());
PADDLE_ENFORCE_EQ((*this)
->operand_source(0)
.type()
.isa<paddle::dialect::DenseTensorType>(),
true,
phi::errors::InvalidArgument(
"Type validation failed for the 0th input, got %s.",
(*this)->operand_source(0).type()));
if (auto vec_type =
(*this)->operand_source(1).type().dyn_cast<pir::VectorType>()) {
for (size_t i = 0; i < vec_type.size(); ++i) {
IR_ENFORCE(vec_type[i].isa<paddle::dialect::DenseTensorType>(),
"Type validation failed for the 1th input, got %s.",
(*this)->operand_source(1).type());
PADDLE_ENFORCE_EQ(
vec_type[i].isa<paddle::dialect::DenseTensorType>(),
true,
phi::errors::InvalidArgument(
"Type validation failed for the 1th input, got %s.",
(*this)->operand_source(1).type()));
}
} else {
IR_ENFORCE((*this)
->operand_source(1)
.type()
.isa<paddle::dialect::DenseTensorType>(),
"Type validation failed for the 1th input, got %s.",
(*this)->operand_source(1).type());
PADDLE_ENFORCE_EQ((*this)
->operand_source(1)
.type()
.isa<paddle::dialect::DenseTensorType>(),
true,
phi::errors::InvalidArgument(
"Type validation failed for the 1th input, got %s.",
(*this)->operand_source(1).type()));
}
}
VLOG(4) << "Verifying attributes:";