49 changes: 19 additions & 30 deletions paddle/fluid/pybind/pir.cc
@@ -1099,15 +1099,26 @@ std::list<Operation *>::const_iterator list_offset(const Block *block,
  return it;
}

template <class F>
void range_block_do(const Block *block, std::vector<int> range, F fn) {
template <typename F, typename S>
void range_block_do(const Block *block,
                    std::vector<int> range,
                    F fn,
                    S skip_fn) {
  for (auto it = list_offset(block, range[0]);
       it != list_offset(block, range[1]);
       ++it) {
    if (skip_fn(*it)) {
      continue;
    }
    fn(*it);
  }
}

template <typename F>
void range_block_do(const Block *block, std::vector<int> range, F fn) {
  range_block_do(block, range, fn, [](Operation *op) { return false; });
}

template <typename K, typename V>
bool ExistsInMapValues(const std::map<K, V> &m, V value) {
  for (const auto &[k, v] : m) {
@@ -1461,7 +1472,9 @@ SplitedResult SplitForwardBackward(
      [&forward_mapper, &forward_program, &clone_options](Operation *op) {
        auto *cloned_op = op->Clone(forward_mapper, clone_options);
        forward_program->block()->push_back(cloned_op);
      });
      },
      // Skip the ShadowOutputOp.
      /*skip_fn=*/[](Operation *op) { return op->isa<pir::ShadowOutputOp>(); });
  auto &forward_value_map = forward_mapper.GetMutableMap<pir::Value>();

  // backward program construct.
@@ -1493,37 +1506,13 @@ SplitedResult SplitForwardBackward(
    if (v.impl() == nullptr) {
      return;
    }
    // Skip the value that already in forward_inputs or forward_params.
    if (std::find(forward_inputs.begin(), forward_inputs.end(), v) !=
            forward_inputs.end() ||
        std::find(forward_params.begin(), forward_params.end(), v) !=
            forward_params.end()) {
    // Skip the value that already in forward_params.
    if (std::find(forward_params.begin(), forward_params.end(), v) !=
        forward_params.end()) {
      return;
    }
    // NOTE(Aurelius84): we should skip insert ShadowOutputOp repeatedly by
    // calling SplitForwardBackward multi-times.
    std::string shadow_output_name =
        std::string("output_") + std::to_string(counter);
    std::unordered_set<pir::Value> inserted_value;
    for (auto it = forward_program->block()->rbegin();
         it != forward_program->block()->rend();
         ++it) {
      if (it->isa<pir::ShadowOutputOp>()) {
        auto out_name =
            it->attribute<pir::StrAttribute>("output_name").AsString();
        if (out_name == shadow_output_name) {
          VLOG(4) << out_name
                  << " has been inserted ShadowOutputOp, skip it now.";
          return;
        }

        inserted_value.insert(it->operand_source(0));
      }
    }

    if (inserted_value.count(forward_value_map[v])) {
      return;
    }
    auto op_info = ctx->GetRegisteredOpInfo(pir::ShadowOutputOp::name());
    pir::AttributeMap attribute_map = {
        {"output_name", pir::StrAttribute::get(ctx, shadow_output_name)},
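
With ShadowOutputOps no longer cloned into forward_program (see the skip_fn above), the reverse scan that guarded against inserting duplicate ShadowOutputOps across repeated SplitForwardBackward calls is unnecessary, and only values already bound as forward_params need to be skipped. A rough sketch of the surviving shape of the output-creation lambda (the name and capture list are stand-ins, not the PR's exact code):

    auto create_output_fn = [&](const pir::Value &v) {
      if (v.impl() == nullptr) {
        return;
      }
      // Values already present in forward_params keep their existing bindings.
      if (std::find(forward_params.begin(), forward_params.end(), v) !=
          forward_params.end()) {
        return;
      }
      std::string shadow_output_name =
          std::string("output_") + std::to_string(counter);
      auto op_info = ctx->GetRegisteredOpInfo(pir::ShadowOutputOp::name());
      pir::AttributeMap attribute_map = {
          {"output_name", pir::StrAttribute::get(ctx, shadow_output_name)},
      };
      // ... construct the ShadowOutputOp from op_info / attribute_map and
      // append it to forward_program->block(), as in the rest of the hunk ...
    };
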
2 changes: 1 addition & 1 deletion test/ir/pir/cinn/symbolic/test_sub_graph_chatglm2_4_st.py
@@ -101,7 +101,7 @@ def train(self, net, to_static, with_prim=False, with_cinn=False):
    def test_ast_prim_cinn(self):
        st_out = self.train(self.net, to_static=True)
        cinn_out = self.train(
            self.net, to_static=True, with_prim=False, with_cinn=False
            self.net, to_static=True, with_prim=True, with_cinn=False
        )
        for st, cinn in zip(
            paddle.utils.flatten(st_out), paddle.utils.flatten(cinn_out)