
Commit 2c7f32a
Merge remote-tracking branch 'origin/develop' into feat/group2
2 parents: 6fb257c + 1b44b2b

53 files changed: +2153 additions, -1951 deletions. Large commits have some content hidden by default, so only a subset of the changed files is shown below.

ci/coverage_test.sh

Lines changed: 1 addition & 1 deletion

@@ -31,7 +31,7 @@ function is_run_distribute_in_op_test() {
         if [ ! -f "$TARGET_FILE" ]; then
             continue
         fi
-        ALL_OPTEST_BAN_AUTO_PARALLEL_TEST=`git diff -U0 upstream/$BRANCH "TARGET_FILE" | grep "+" | grep "check_auto_parallel=" || true`
+        ALL_OPTEST_BAN_AUTO_PARALLEL_TEST=`git diff -U0 upstream/$BRANCH -- "$TARGET_FILE" | grep "+" | grep "check_auto_parallel=" || true`
         if [ "${ALL_OPTEST_BAN_AUTO_PARALLEL_TEST}" != "" ] && [ "${GIT_PR_ID}" != "" ]; then
             export FLAGS_COVERAGE_RUN_AUTO_PARALLEL_IN_OP_TEST=1
             echo "export FLAGS_COVERAGE_RUN_AUTO_PARALLEL_IN_OP_TEST=1" >> "$HOME/.bashrc"

paddle/fluid/framework/ir/fc_gru_fuse_pass.cc

Lines changed: 7 additions & 6 deletions

@@ -181,7 +181,7 @@ int FCGRUFusePass::BuildFusion(Graph* graph,
                         Node* bias,
                         Node* hidden,
                         Node* fc_bias,
-                        const bool use_mkldnn) {
+                        const bool use_onednn) {
     OpDesc op_desc;
     op_desc.SetType("fusion_gru");

@@ -200,7 +200,7 @@ int FCGRUFusePass::BuildFusion(Graph* graph,
         gru->Op()->GetAttrIfExists<bool>("origin_mode"));
     // TODO(TJ): This should be a option for infer
     op_desc.SetAttr("use_seq", true);
-    op_desc.SetAttr("use_mkldnn", use_mkldnn);
+    op_desc.SetAttr("use_onednn", use_onednn);
     op_desc.SetAttr("activation", gru->Op()->GetAttr("activation"));
     op_desc.SetAttr("gate_activation", gru->Op()->GetAttr("gate_activation"));

@@ -290,8 +290,9 @@ int FCGRUFusePass::BuildFusion(Graph* graph,
       LOG(INFO) << "fc_gru_fuse_pass not supported when origin_mode=True.";
       return;
     }
-    const bool use_mkldnn =
-        (mul->Op()->GetAttrIfExists<bool>("use_mkldnn") &&
+    const bool use_onednn =
+        ((mul->Op()->GetAttrIfExists<bool>("use_mkldnn") ||
+          mul->Op()->GetAttrIfExists<bool>("use_onednn")) &&
         gru->Op()->GetAttrIfExists<std::string>("activation") == "tanh" &&
         gru->Op()->GetAttrIfExists<std::string>("gate_activation") ==
             "sigmoid");

@@ -302,7 +303,7 @@ int FCGRUFusePass::BuildFusion(Graph* graph,
     GET_IR_NODE_FROM_SUBGRAPH(elementwise_add, elementwise_add, fc_pattern);
     GET_IR_NODE_FROM_SUBGRAPH(fc_out, elementwise_add_out, fc_pattern);

-    gru_creator(gru, x_n, w, Weight, Bias, Hidden, fc_bias, use_mkldnn);
+    gru_creator(gru, x_n, w, Weight, Bias, Hidden, fc_bias, use_onednn);
     // Remove unneeded nodes.
     std::unordered_set<const Node*> marked_nodes({mul,
                                                   gru,

@@ -314,7 +315,7 @@ int FCGRUFusePass::BuildFusion(Graph* graph,
                                                   BatchHidden});
     GraphSafeRemoveNodes(graph, marked_nodes);
   } else {
-    gru_creator(gru, x_n, w, Weight, Bias, Hidden, nullptr, use_mkldnn);
+    gru_creator(gru, x_n, w, Weight, Bias, Hidden, nullptr, use_onednn);
     // Remove unneeded nodes.
     std::unordered_set<const Node*> marked_nodes(
         {mul, gru, BatchGate, BatchResetHiddenPrev, BatchHidden});
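The rename threads a single flag through the pass: the fused fusion_gru op now carries use_onednn, while models saved with the legacy use_mkldnn attribute still fuse, because the detection ORs both attribute names. A self-contained sketch of that compatibility read; FakeOpDesc is an illustrative stand-in for Paddle's OpDesc, not the real class:

#include <map>
#include <string>

// Stand-in for OpDesc: the lookup returns false when the attribute is
// absent, mirroring GetAttrIfExists<bool>.
struct FakeOpDesc {
  std::map<std::string, bool> attrs;
  bool GetBoolAttrIfExists(const std::string& name) const {
    auto it = attrs.find(name);
    return it != attrs.end() && it->second;
  }
};

// The fused op gets use_onednn = true when either the legacy use_mkldnn
// flag or the new use_onednn flag is set on the matched mul op (the real
// pass additionally requires tanh/sigmoid GRU activations).
bool UseOneDNN(const FakeOpDesc& op) {
  return op.GetBoolAttrIfExists("use_mkldnn") ||
         op.GetBoolAttrIfExists("use_onednn");
}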

paddle/fluid/framework/ir/onednn/operator_scale_onednn_fuse_pass.cc

Lines changed: 7 additions & 6 deletions

@@ -64,17 +64,18 @@ void FuseOperatorScaleOneDNNPass::FuseScale(Graph *graph,
   bool use_onednn_not = false;
   // use_mkldnn, use_onednn both set to false.
   if (operator_op->Op()->HasAttr("use_mkldnn") &&
-      !(PADDLE_GET_CONST(bool, operator_op->Op()->GetAttr("use_mkldnn"))) &&
-      operator_op->Op()->HasAttr("use_onednn") &&
-      !(PADDLE_GET_CONST(bool, operator_op->Op()->GetAttr("use_onednn")))) {
-    use_onednn_not = true;
+      operator_op->Op()->HasAttr("use_onednn")) {
+    if (!(PADDLE_GET_CONST(bool, operator_op->Op()->GetAttr("use_mkldnn"))) &&
+        !(PADDLE_GET_CONST(bool, operator_op->Op()->GetAttr("use_onednn")))) {
+      use_onednn_not = true;
+    }
   } else if (operator_op->Op()->HasAttr("use_mkldnn") &&
              !(PADDLE_GET_CONST(bool,
                                 operator_op->Op()->GetAttr("use_mkldnn")))) {
     use_onednn_not = true;
-  } else if (operator_op->Op()->HasAttr("use_mkldnn") &&
+  } else if (operator_op->Op()->HasAttr("use_onednn") &&
              !(PADDLE_GET_CONST(bool,
-                                operator_op->Op()->GetAttr("use_mkldnn")))) {
+                                operator_op->Op()->GetAttr("use_onednn")))) {
     use_onednn_not = true;
   }
   if (use_onednn_not) {
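The bug fixed here: the final else-if branch was a copy of the middle one, testing use_mkldnn a second time, so an operator carrying only use_onednn=false was never flagged. After the rewrite the three branches together implement one rule: oneDNN counts as explicitly disabled when at least one of the two flags is present and every present flag is false. A compact, self-contained sketch of that rule; FakeOpDesc is an illustrative stand-in, not Paddle's class:

#include <map>
#include <optional>
#include <string>

// Stand-in for OpDesc: Get() returns nullopt when the attribute is
// missing, like the HasAttr()/GetAttr() pair in the real pass.
struct FakeOpDesc {
  std::map<std::string, bool> attrs;
  std::optional<bool> Get(const std::string& name) const {
    auto it = attrs.find(name);
    if (it == attrs.end()) return std::nullopt;
    return it->second;
  }
};

// Equivalent to the three fixed branches: explicitly disabled when at
// least one flag is present and every present flag is false.
bool OneDNNExplicitlyDisabled(const FakeOpDesc& op) {
  std::optional<bool> mkldnn = op.Get("use_mkldnn");
  std::optional<bool> onednn = op.Get("use_onednn");
  if (!mkldnn && !onednn) return false;  // neither flag present
  return !mkldnn.value_or(false) && !onednn.value_or(false);
}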

paddle/fluid/ir_adaptor/translator/op_translator.cc

Lines changed: 14 additions & 6 deletions

@@ -1076,6 +1076,10 @@ struct CastOpTranscriber : public OpTranscriber {
       attribute_map["mkldnn_data_type"] = pir::StrAttribute::get(
           ctx, op_desc.GetAttrIfExists<std::string>("mkldnn_data_type"));
     }
+    if (op_desc.HasAttr("onednn_data_type")) {  // NOLINT
+      attribute_map["onednn_data_type"] = pir::StrAttribute::get(
+          ctx, op_desc.GetAttrIfExists<std::string>("onednn_data_type"));
+    }
 #endif
     return attribute_map;
   }

@@ -1661,12 +1665,16 @@ struct SplitOpTranscriber : public OpTranscriber {
     return attribute_map;
   }
 #ifdef PADDLE_WITH_DNNL
-  else if (op_desc.HasAttr("mkldnn_data_type")) {  // NOLINT
-    pir::AttributeMap attribute_map = {
-        {"mkldnn_data_type",
-         pir::StrAttribute::get(
-             ctx, op_desc.GetAttrIfExists<std::string>("mkldnn_data_type"))},
-    };
+  else {  // NOLINT
+    pir::AttributeMap attribute_map = {};
+    if (op_desc.HasAttr("mkldnn_data_type")) {
+      attribute_map["mkldnn_data_type"] = pir::StrAttribute::get(
+          ctx, op_desc.GetAttrIfExists<std::string>("mkldnn_data_type"));
+    }
+    if (op_desc.HasAttr("onednn_data_type")) {
+      attribute_map["onednn_data_type"] = pir::StrAttribute::get(
+          ctx, op_desc.GetAttrIfExists<std::string>("onednn_data_type"));
+    }
     return attribute_map;
   }
 #endif
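In both transcribers the pattern is the same: rather than gating the whole branch on mkldnn_data_type, build the attribute map unconditionally and copy whichever of the two data-type attributes the legacy op actually carries. Reduced to a sketch, with plain std::map standing in for pir::AttributeMap and for the op_desc lookup (illustrative types, not the real Paddle/PIR API):

#include <map>
#include <string>

using AttributeMap = std::map<std::string, std::string>;
using OpAttrs = std::map<std::string, std::string>;

// Copy each data-type attribute only if the legacy op carries it; an op
// with neither attribute simply yields an empty map instead of being
// skipped by the translation.
AttributeMap TranslateDataTypeAttrs(const OpAttrs& op_attrs) {
  AttributeMap attribute_map;
  for (const char* name : {"mkldnn_data_type", "onednn_data_type"}) {
    auto it = op_attrs.find(name);
    if (it != op_attrs.end()) {
      attribute_map[name] = it->second;
    }
  }
  return attribute_map;
}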

paddle/phi/ops/yaml/ops.yaml

Lines changed: 30 additions & 1 deletion

@@ -778,7 +778,12 @@

 - op : bmm
   args : (Tensor x, Tensor y)
-  output : Tensor
+  python_api :
+    name : [paddle.bmm, paddle.Tensor.bmm]
+    args_alias:
+      x : [input]
+      y : [mat2]
+  output : Tensor(out)
   infer_meta :
     func : BmmInferMeta
   kernel :

@@ -1205,6 +1210,10 @@

 - op : cos
   args : (Tensor x)
+  python_api:
+    name: [paddle.cos, paddle.Tensor.cos]
+    args_alias:
+      use_default_mapping : True
   output : Tensor(out)
   infer_meta :
     func : UnchangedInferMeta

@@ -2185,6 +2194,10 @@

 - op : floor
   args : (Tensor x)
+  python_api:
+    name: [paddle.floor, paddle.Tensor.floor]
+    args_alias:
+      use_default_mapping : True
   output : Tensor(out)
   infer_meta :
     func : UnchangedInferMeta

@@ -3181,6 +3194,10 @@

 - op : log
   args : (Tensor x)
+  python_api:
+    name: [paddle.log, paddle.Tensor.log]
+    args_alias:
+      use_default_mapping : True
   output : Tensor(out)
   infer_meta :
     func : UnchangedInferMeta

@@ -4701,6 +4718,10 @@

 - op : rsqrt
   args : (Tensor x)
+  python_api:
+    name: [paddle.rsqrt, paddle.Tensor.rsqrt]
+    args_alias:
+      use_default_mapping : True
   output : Tensor(out)
   infer_meta :
     func : UnchangedInferMeta

@@ -4993,6 +5014,10 @@

 - op : sign
   args : (Tensor x)
+  python_api :
+    name: [paddle.sign, paddle.Tensor.sign]
+    args_alias:
+      use_default_mapping : True
   output : Tensor(out)
   infer_meta :
     func : UnchangedInferMeta

@@ -5017,6 +5042,10 @@

 - op : sin
   args : (Tensor x)
+  python_api :
+    name: [paddle.sin, paddle.Tensor.sin]
+    args_alias:
+      use_default_mapping : True
   output : Tensor(out)
   infer_meta :
     func : UnchangedInferMeta
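Reading these entries: name lists where the generated binding is exposed (a module-level function plus a paddle.Tensor method), and args_alias maps the YAML argument names onto alternative Python keyword names; bmm spells its aliases out (x also answers to input, y to mat2), while the unary ops opt into use_default_mapping. Assuming the aliases behave as keyword fallbacks, the following hypothetical usage sketch shows calls that would be equivalent (it is not generated code):

import paddle

a = paddle.rand([2, 3, 4])
b = paddle.rand([2, 4, 5])

# Canonical YAML names vs. the aliases declared under args_alias.
out1 = paddle.bmm(x=a, y=b)          # YAML argument names
out2 = paddle.bmm(input=a, mat2=b)   # aliases: x -> input, y -> mat2

# Unary ops are exposed both as free functions and as Tensor methods.
s1 = paddle.sin(a)
s2 = a.sin()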

paddle/scripts/paddle_build.sh

Lines changed: 1 addition & 1 deletion

@@ -3295,7 +3295,7 @@ function is_run_distribute_in_op_test() {
         if [ ! -f "$TARGET_FILE" ]; then
            continue
         fi
-        ALL_OPTEST_BAN_AUTO_PARALLEL_TEST=`git diff -U0 upstream/$BRANCH "$TARGET_FILE" | grep "+" | grep "check_auto_parallel=" || true`
+        ALL_OPTEST_BAN_AUTO_PARALLEL_TEST=`git diff -U0 upstream/$BRANCH -- "$TARGET_FILE" | grep "+" | grep "check_auto_parallel=" || true`
         if [ "${ALL_OPTEST_BAN_AUTO_PARALLEL_TEST}" != "" ] && [ "${GIT_PR_ID}" != "" ]; then
             export FLAGS_COVERAGE_RUN_AUTO_PARALLEL_IN_OP_TEST=1
         fi
