diff --git a/paddle/fluid/operators/compat/fused_softplus.pbtxt b/paddle/fluid/operators/compat/fused_softplus.pbtxt
deleted file mode 100644
index 030530e9dce5c1..00000000000000
--- a/paddle/fluid/operators/compat/fused_softplus.pbtxt
+++ /dev/null
@@ -1,31 +0,0 @@
-type: "fused_softplus"
-def {
-  inputs {
-    name: "X"
-  }
-  outputs {
-    name: "Out"
-  }
-  attrs {
-    name: "beta"
-    type: FLOAT
-  }
-  attrs {
-    name: "threshold"
-    type: FLOAT
-  }
-}
-extra {
-  attrs {
-    name: "fuse_activation"
-    type: STRING
-  }
-  attrs {
-    name: "fuse_alpha"
-    type: FLOAT
-  }
-  attrs {
-    name: "fuse_beta"
-    type: FLOAT
-  }
-}
diff --git a/paddle/fluid/operators/fused/fused_softplus_op.cc b/paddle/fluid/operators/fused/fused_softplus_op.cc
deleted file mode 100644
index 2e0d8ca7d91eb5..00000000000000
--- a/paddle/fluid/operators/fused/fused_softplus_op.cc
+++ /dev/null
@@ -1,69 +0,0 @@
-// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "paddle/fluid/framework/op_registry.h"
-
-namespace paddle {
-namespace operators {
-
-class FusedSoftplusOp : public framework::OperatorWithKernel {
- public:
-  using framework::OperatorWithKernel::OperatorWithKernel;
-
-  void InferShape(framework::InferShapeContext* ctx) const override {
-    ctx->ShareDim("X", /*->*/ "Out");
-    ctx->ShareLoD("X", /*->*/ "Out");
-  }
-
- protected:
-  phi::KernelKey GetExpectedKernelType(
-      const framework::ExecutionContext& ctx) const override {
-    auto data_type = this->IndicateVarDataType(ctx, "X");
-    return phi::KernelKey(data_type, ctx.GetPlace());
-  }
-};
-
-class FusedSoftplusOpMaker : public framework::OpProtoAndCheckerMaker {
- public:
-  void Make() override {
-    AddInput("X", "Input of softplus operator");
-    AddOutput("Out", "Output of softplus operator");
-    AddAttr<float>("beta", "Beta value for the softplus formulation")
-        .SetDefault(1.0f);
-    AddAttr<float>("threshold", "Values above this revert to a linear function")
-        .SetDefault(20.0f);
-    AddAttr<std::string>(
-        "fuse_activation",
-        "Activation type from softplus_activation_onednn_fuse_pass")
-        .SetDefault("");
-    AddAttr<float>("fuse_alpha",
-                   "Activation alpha from softplus_activation_onednn_fuse_pass")
-        .SetDefault(0.0f);
-    AddAttr<float>("fuse_beta",
-                   "Activation beta from softplus_activation_onednn_fuse_pass")
-        .SetDefault(0.0f);
-    AddComment(R"DOC(Softplus extended with oneDNN-specific fusion logic.)DOC");
-  }
-};
-
-}  // namespace operators
-}  // namespace paddle
-
-namespace ops = paddle::operators;
-REGISTER_OPERATOR(
-    fused_softplus,
-    ops::FusedSoftplusOp,
-    ops::FusedSoftplusOpMaker,
-    paddle::framework::EmptyGradOpMaker<paddle::framework::OpDesc>,
-    paddle::framework::EmptyGradOpMaker<paddle::imperative::OpBase>);
diff --git a/paddle/fluid/operators/ops_signature/fused_softplus_sig.cc b/paddle/fluid/operators/ops_signature/fused_softplus_sig.cc
deleted file mode 100644
index 56445af104dc11..00000000000000
--- a/paddle/fluid/operators/ops_signature/fused_softplus_sig.cc
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "paddle/phi/core/compat/op_utils.h"
-
-namespace phi {
-
-KernelSignature FusedSoftplusOpArgumentMapping(
-    const ArgumentMappingContext& ctx UNUSED) {
-  return KernelSignature(
-      "fused_softplus",
-      {"X"},
-      {"beta", "threshold", "fuse_activation", "fuse_alpha", "fuse_beta"},
-      {"Out"});
-}
-
-}  // namespace phi
-
-PD_REGISTER_ARG_MAPPING_FN(fused_softplus, phi::FusedSoftplusOpArgumentMapping);
diff --git a/test/cpp/inference/infer_ut/test_ppyolov2_r50vd.cc b/test/cpp/inference/infer_ut/test_ppyolov2_r50vd.cc
index 6935ca0d37fcdd..e7ba73b004401f 100644
--- a/test/cpp/inference/infer_ut/test_ppyolov2_r50vd.cc
+++ b/test/cpp/inference/infer_ut/test_ppyolov2_r50vd.cc
@@ -102,6 +102,9 @@ TEST(tensorrt_tester_ppyolov2_r50vd, multi_thread2_trt_fp32_bz1) {
   std::cout << "finish multi-thread test" << std::endl;
 }
 
+// fused_softplus is about to be removed; the test uses fused_softplus and is
+// disabled
+/*
 TEST(mkldnn_tester_ppyolov2_r50vd, multi_thread2_mkl_bz2) {
   int thread_num = 2;
   // init input data
@@ -149,6 +152,7 @@ TEST(mkldnn_tester_ppyolov2_r50vd, multi_thread2_mkl_bz2) {
 
   std::cout << "finish multi-thread test" << std::endl;
 }
+*/
 
 }  // namespace paddle_infer
 
diff --git a/test/ir/inference/test_onednn_softplus_activation_fuse_pass.py b/test/ir/inference/test_onednn_softplus_activation_fuse_pass.py
deleted file mode 100644
index 4a8e8604480125..00000000000000
--- a/test/ir/inference/test_onednn_softplus_activation_fuse_pass.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from functools import partial
-
-import hypothesis.strategies as st
-import numpy as np
-from auto_scan_test import PassAutoScanTest
-from program_config import OpConfig, ProgramConfig, TensorConfig
-
-
-class TestSoftplusActivationOneDNNFusePass(PassAutoScanTest):
-    def sample_program_config(self, draw):
-        activation_type = draw(
-            st.sampled_from(
-                [
-                    'relu',
-                    'gelu',
-                    'tanh',
-                    'sigmoid',
-                    'swish',
-                    'mish',
-                    'sqrt',
-                    'hard_sigmoid',
-                    'hard_swish',
-                    'abs',
-                    'relu6',
-                    'clip',
-                    'leaky_relu',
-                ]
-            )
-        )
-
-        def generate_input():
-            return np.random.random([4, 3, 100, 100]).astype(np.float32)
-
-        softplus_op = OpConfig(
-            type='softplus',
-            inputs={
-                'X': ['activation_X'],
-            },
-            outputs={'Out': ['softplus_out']},
-            attrs={
-                'beta': draw(st.floats(min_value=0.5, max_value=2)),
-                'threshold': draw(st.floats(min_value=15, max_value=30)),
-            },
-        )
-
-        if activation_type == 'clip':
-            activation_op = OpConfig(
-                activation_type,
-                inputs={'X': ['softplus_out']},
-                outputs={'Out': ['activation_output']},
-                min=draw(st.floats(min_value=0.1, max_value=0.49)),
-                max=draw(st.floats(min_value=0.5, max_value=1.0)),
-            )
-        elif activation_type == "gelu":
-            activation_op = OpConfig(
-                activation_type,
-                inputs={"X": ["softplus_out"]},
-                outputs={"Out": ["activation_output"]},
-                approximate=draw(st.booleans()),
-            )
-        elif activation_type == 'leaky_relu':
-            activation_op = OpConfig(
-                activation_type,
-                inputs={'X': ['softplus_out']},
-                outputs={'Out': ['activation_output']},
-                alpha=draw(st.floats(min_value=0.1, max_value=1.0)),
-            )
-        elif activation_type == 'relu6':
-            activation_op = OpConfig(
-                activation_type,
-                inputs={'X': ['softplus_out']},
-                outputs={'Out': ['activation_output']},
-                threshold=6.0,
-            )
-        elif activation_type == 'swish':
-            activation_op = OpConfig(
-                activation_type,
-                inputs={'X': ['softplus_out']},
-                outputs={'Out': ['activation_output']},
-                beta=1.0,
-            )
-        else:
-            activation_op = OpConfig(
-                activation_type,
-                inputs={'X': ['softplus_out']},
-                outputs={'Out': ['activation_output']},
-            )
-
-        model_net = [softplus_op, activation_op]
-
-        program_config = ProgramConfig(
-            ops=model_net,
-            weights={},
-            inputs={
-                'activation_X': TensorConfig(data_gen=partial(generate_input))
-            },
-            outputs=['activation_output'],
-        )
-
-        return program_config
-
-    def sample_predictor_configs(self, program_config):
-        config = self.create_inference_config(use_mkldnn=True)
-        yield config, ['fused_softplus'], (1e-5, 1e-5)
-
-    def test(self):
-        self.run_and_statis(
-            quant=False,
-            max_examples=40,
-            passes=['softplus_activation_onednn_fuse_pass'],
-        )
-
-
-if __name__ == '__main__':
-    unittest.main()
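
For reference, the deleted `FusedSoftplusOpMaker` documents the `beta` (default 1.0) and `threshold` (default 20.0) attributes, the latter as "values above this revert to a linear function". A minimal NumPy sketch of that standard softplus formulation follows; it is not part of the patch, and the helper name is illustrative only:

```python
import numpy as np


def softplus(x, beta=1.0, threshold=20.0):
    """Softplus with the beta/threshold semantics the removed op documented:
    softplus(x) = (1/beta) * log(1 + exp(beta * x)), and inputs where
    beta * x exceeds `threshold` revert to the linear function x."""
    bx = beta * x
    # log1p(exp(.)) is the numerically stable form; clip the exponent so the
    # branch discarded by np.where cannot overflow for large inputs.
    return np.where(
        bx > threshold, x, np.log1p(np.exp(np.minimum(bx, threshold))) / beta
    )
```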