Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -120,8 +120,8 @@ struct SkipLayerNorm : public PatternBase {
// (word, weights_0) lookup_table -> word_emb
// (pos, weights_1) lookup_table -> pos_emb
// (sent, weights_2) lookup_table -> sent_emb
// (word_emb, pos_emb) elementweise_add -> elementwise_out_0
// (elemtwise_out_0, sent_emb) elementweise_add -> elementwise_out_1
// (word_emb, pos_emb) elementwise_add -> elementwise_out_0
// (elementwise_out_0, sent_emb) elementwise_add -> elementwise_out_1
// (elementwise_out_1, scale, bias) layer_norm -> layer_norm_out
//
// and then convert the corresponding subgraph to:
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/framework/ir/graph_pattern_detector.cc
Original file line number Diff line number Diff line change
Expand Up @@ -4384,7 +4384,7 @@ PDNode *patterns::ReverseRollPattern::operator()(PDNode *in) {
}
auto reshape2_50_op =
pattern->NewNode(reshape2_50_op_repr())->assert_is_op("reshape2");
auto reshape2_50_out = pattern->NewNode(reshaep2_50_out_repr())
auto reshape2_50_out = pattern->NewNode(reshape2_50_out_repr())
->assert_is_op_output("reshape2", "Out")
->AsOutput();
reshape2_00_op->LinksFrom({in});
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/framework/ir/graph_pattern_detector.h
Original file line number Diff line number Diff line change
Expand Up @@ -2245,7 +2245,7 @@ struct ReverseRollPattern : public PatternBase {
PATTERN_DECL_NODE(roll_40_op);
PATTERN_DECL_NODE(roll_40_out);
PATTERN_DECL_NODE(reshape2_50_op);
PATTERN_DECL_NODE(reshaep2_50_out);
PATTERN_DECL_NODE(reshape2_50_out);
};

// pattern for merge_layernorm
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/framework/ir/is_test_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ namespace ir {
class Graph;

void IsTestPass::ApplyImpl(ir::Graph* graph) const {
VLOG(3) << "Sets is_test attrbiute to true and if it is missing, inserts it "
VLOG(3) << "Sets is_test attribute to true and if it is missing, inserts it "
"for activations and pooling.";
auto op_list = {"pool2d", "sigmoid", "logsigmoid",
"softshrink", "exp", "brelu",
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/framework/ir/op_compat_sensible_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -167,7 +167,7 @@ AttrCompat& OpCompat::AddAttr(const std::string& attr_name) {
attr_compats_.find(attr_name),
attr_compats_.end(),
platform::errors::InvalidArgument(
"The attrubute compat with the same name has been added"));
"The attribute compat with the same name has been added"));
attr_compats_.emplace(attr_name, AttrCompat(attr_name, this));
return attr_compats_.at(attr_name);
}
Expand Down
8 changes: 4 additions & 4 deletions paddle/fluid/framework/ir/op_compat_sensible_pass.h
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ class AttrCompat {
//! Assert the attribute is an integer in the `candidates` domain.
AttrCompat& IsIntIn(const std::set<int>& candidates);

// @{ Number-releated methods
// @{ Number-related methods
//! Assert the attribute is a number and > `v`.
template <typename T>
AttrCompat& IsNumGT(T v);
Expand Down Expand Up @@ -162,11 +162,11 @@ class OpCompat {
* void AddOpCompat(OpCompat&& judger);
*
* Most of the Passes are used for fusing ops, so we define a method for such
* scenerios.
* scenarios.
* void AccessSubgraph(const GraphPatternDetector::subgraph_t& subgraph,
Graph* g);
* It will check the Op compatibility automatically.
* For other scenirios, one should call `IsCompat` by himself.
* For other scenarios, one should call `IsCompat` by himself.
*
* A FC fuse pass example:
* class FcFusePass : public OpCompatSensiblePass {
Expand All @@ -177,7 +177,7 @@ class OpCompat {
* .AddInput("Input").IsTensor().End()
* .AddAttr("in_num_col_dims").IsNumGE(1);
* AddOpCompat(OpCompat("Add")). ...;
* // There are multiple activation implemention.
* // There are multiple activation implementation.
* AddOpCompat(OpCompat("Tanh")). ...;
* AddOpCompat(OpCompat("Sigmoid")). ...;
* }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@ struct PrelnSkipLayerNorm : public PatternBase {
// and then convert the corresponding subgraph to:
//
// (word, pos, sent, weights_0, weights_1, weights_2,
// scale, baias) Prelnembedding_eltwise_layernorm -> layer_norm_out +
// scale, bias) Prelnembedding_eltwise_layernorm -> layer_norm_out +
// elementwise_add_out
//
//
Expand Down
6 changes: 3 additions & 3 deletions paddle/fluid/framework/ir/reverse_roll_fuse_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
GET_IR_NODE(reshape2_30_op); \
GET_IR_NODE(reshape2_30_out); \
GET_IR_NODE(reshape2_50_op); \
GET_IR_NODE(reshaep2_50_out);
GET_IR_NODE(reshape2_50_out);

namespace paddle {
namespace framework {
Expand Down Expand Up @@ -168,15 +168,15 @@ int ReverseRollFusePass::ApplyPattern(ir::Graph* graph, bool with_roll) const {
OpDesc reverse_roll_desc(reshape2_00_op->Op()->Block());
reverse_roll_desc.SetType("reverse_roll");
reverse_roll_desc.SetInput("X", {subgraph.at(x)->Name()});
reverse_roll_desc.SetOutput("Out", {reshaep2_50_out->Name()});
reverse_roll_desc.SetOutput("Out", {reshape2_50_out->Name()});
reverse_roll_desc.SetAttr("window_number", window_number);
reverse_roll_desc.SetAttr("window_size", window_size);
reverse_roll_desc.SetAttr("window_len", window_len);
reverse_roll_desc.SetAttr("shift_size", static_cast<int>(shift_size));
reverse_roll_desc.SetAttr("input_resolution", input_resolution);
auto reverse_roll_node = graph->CreateOpNode(&reverse_roll_desc);
IR_NODE_LINK_TO(subgraph.at(x), reverse_roll_node);
IR_NODE_LINK_TO(reverse_roll_node, reshaep2_50_out);
IR_NODE_LINK_TO(reverse_roll_node, reshape2_50_out);
GraphSafeRemoveNodes(graph, del_node_set);
++fuse_count;
};
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/framework/ir/shuffle_channel_detect_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ void ShuffleChannelDetectPass::ApplyImpl(ir::Graph* graph) const {
}
}

// shuffle_channel dosen't change shape
// shuffle_channel doesn't change shape
if ((reshape2_shape[0] != -1) && (x_shape1[0] != reshape2_shape[0])) {
return;
}
Expand Down
4 changes: 2 additions & 2 deletions paddle/fluid/framework/ir/simplify_with_basic_ops_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ namespace framework {
namespace ir {

/*
* This pass is to simplify the Grpah, it may contains:
* - replace comlicated op with basic op
* This pass is to simplify the Graph, it may contain:
* - replace complicated op with basic op
* - remove some unnecessary op
*
* In the current implementation, it supports:
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/framework/ir/subgraph_detector.cc
Original file line number Diff line number Diff line change
Expand Up @@ -289,7 +289,7 @@ std::vector<std::vector<Node *>> SubgraphDetector::ExtractSubGraphs() {
node_map[n->id()] = n;
}

// create breif node map
// create brief node map
for (auto &itr : brief_node_map) {
for (Node *node : itr.second->node->inputs) {
if (!valid_node_ids.count(node->id())) {
Expand Down
16 changes: 8 additions & 8 deletions paddle/fluid/framework/ir/transfer_layout_elim_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ namespace ir {
// Put transfer_layout after op_node
// transfer_info is for case when we need know this transfer_layout info,
// nchw_nhwc or nhwc_nchw
void TransferLayoutElimPass::PutTranferlayoutAfterOp(
void TransferLayoutElimPass::PutTransferlayoutAfterOp(
Node *op_node, ir::Graph *graph, std::string *transfer_info) const {
std::unordered_set<const Node *> remove_nodes;
// Ensure op_node has only one output!
Expand Down Expand Up @@ -200,9 +200,9 @@ bool TransferLayoutElimPass::AllInputIsTransferlayout(
// | | |
// op0 op1 op2

void TransferLayoutElimPass::ElimTwoTranferlayout(Node *op_node,
ir::Graph *graph,
bool *modify) const {
void TransferLayoutElimPass::ElimTwoTransferlayout(Node *op_node,
ir::Graph *graph,
bool *modify) const {
std::unordered_set<const Node *> remove_nodes;
auto var1 = op_node->inputs[0];
auto transfer_layout0 = var1->inputs[0];
Expand Down Expand Up @@ -293,7 +293,7 @@ void TransferLayoutElimPass::ApplyImpl(ir::Graph *graph) const {
if (AllInputIsTransferlayout(op_node)) {
if (is_concat_like_op) {
std::string transfer_info;
PutTranferlayoutAfterOp(op_node, graph, &transfer_info);
PutTransferlayoutAfterOp(op_node, graph, &transfer_info);
int axis = op_node->Op()->GetAttrIfExists<int>("axis");
int modify_axis = axis;
if (transfer_info == "nhwc_nchw") {
Expand All @@ -319,7 +319,7 @@ void TransferLayoutElimPass::ApplyImpl(ir::Graph *graph) const {
break;
}
if (is_pool_like_op) {
PutTranferlayoutAfterOp(op_node, graph, nullptr);
PutTransferlayoutAfterOp(op_node, graph, nullptr);
op_node->Op()->SetAttr(
"data_format",
transfer_format(
Expand All @@ -329,13 +329,13 @@ void TransferLayoutElimPass::ApplyImpl(ir::Graph *graph) const {
break;
}
if (is_act_like_op) {
PutTranferlayoutAfterOp(op_node, graph, nullptr);
PutTransferlayoutAfterOp(op_node, graph, nullptr);
modify = true;
move_down_count++;
break;
}
if (is_elim_op) {
ElimTwoTranferlayout(op_node, graph, &modify);
ElimTwoTransferlayout(op_node, graph, &modify);
elim_count++;
break;
}
Expand Down
12 changes: 6 additions & 6 deletions paddle/fluid/framework/ir/transfer_layout_elim_pass.h
Original file line number Diff line number Diff line change
Expand Up @@ -29,12 +29,12 @@ class TransferLayoutElimPass : public FusePassBase {
protected:
void ApplyImpl(ir::Graph *graph) const override;
bool AllInputIsTransferlayout(const Node *op_node) const;
void PutTranferlayoutAfterOp(Node *op_node,
ir::Graph *graph,
std::string *transfer_info) const;
void ElimTwoTranferlayout(Node *op_node,
ir::Graph *graph,
bool *modify) const;
void PutTransferlayoutAfterOp(Node *op_node,
ir::Graph *graph,
std::string *transfer_info) const;
void ElimTwoTransferlayout(Node *op_node,
ir::Graph *graph,
bool *modify) const;
};

} // namespace ir
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ Origin subgraph:
| cumsum
| |
\ /
elemetwise_sub
elementwise_sub

Fused subgraph:
generate_sequence_xpu
Expand Down