277 changes: 0 additions & 277 deletions paddle/fluid/operators/activation_op.cc
@@ -146,26 +146,6 @@ Sigmoid Activation

)DOC";

UNUSED constexpr char SiluDoc[] = R"DOC(
Silu Activation Operator

$$out = x * \\frac{1}{1 + e^{-x}}$$
)DOC";

UNUSED constexpr char LogSigmoidDoc[] = R"DOC(
Logsigmoid Activation Operator

$$out = \\log \\frac{1}{1 + e^{-x}}$$

)DOC";

UNUSED constexpr char Expm1Doc[] = R"DOC(
Expm1 Operator. Computes expm1 of x element-wise, with the natural constant :math:`e` as the base.

$$out = e^x - 1$$

)DOC";

UNUSED constexpr char ReluDoc[] = R"DOC(
Relu Activation Operator.

@@ -206,43 +186,6 @@ Please make sure input is legal in case of numeric errors.

)DOC";

UNUSED constexpr char CeilDoc[] = R"DOC(
Ceil Operator. Computes ceil of x element-wise.

.. math::
out = \left \lceil x \right \rceil

)DOC";

UNUSED constexpr char FloorDoc[] = R"DOC(
Floor Activation Operator. Computes floor of x element-wise.

$$out = \\lfloor x \\rfloor$$

)DOC";

UNUSED constexpr char RoundDoc[] = R"DOC(
The OP rounds each value in the input to the nearest integer.

.. code-block:: text

input:
x.shape = [4]
x.data = [1.2, -0.9, 3.4, 0.9]

output:
out.shape = [4]
out.data = [1., -1., 3., 1.]

)DOC";

UNUSED constexpr char ReciprocalDoc[] = R"DOC(
Reciprocal Activation Operator.

$$out = \\frac{1}{x}$$

)DOC";

UNUSED constexpr char LogDoc[] = R"DOC(
Log Activation Operator.

@@ -252,33 +195,6 @@ Natural logarithm of x.

)DOC";

UNUSED constexpr char Log2Doc[] = R"DOC(
Log2 Activation Operator.

$$out = \log_2 x$$

Logarithm of x to base 2.

)DOC";

UNUSED constexpr char Log10Doc[] = R"DOC(
Log10 Activation Operator.

$$out = \log_{10} x$$

Logarithm of x to base 10.

)DOC";

UNUSED constexpr char Log1pDoc[] = R"DOC(
Log1p Activation Operator.

$out = \ln(x+1)$

Natural logarithm of (x+1).

)DOC";

UNUSED constexpr char SquareDoc[] = R"DOC(
The OP squares each element of the input.

@@ -356,28 +272,6 @@ class SoftShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
}
};

class HardShrinkOpMaker : public framework::OpProtoAndCheckerMaker {
public:
void Make() override {
AddInput("X", "Input of HardShrink operator");
AddOutput("Out", "Output of HardShrink operator");
AddAttr<float>("threshold",
"The value of threshold for HardShrink. [default: 0.5]")
.SetDefault(0.5f);
AddComment(R"DOC(
:strong:`HardShrink activation operator`

.. math::
out = \begin{cases}
x, \text{if } x > \lambda \\
x, \text{if } x < -\lambda \\
0, \text{otherwise}
\end{cases}

)DOC");
}
};
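
A standalone sketch of the piecewise definition above, with the threshold attribute as lambda (default 0.5); illustrative, not the registered functor:

#include <cstdio>

// HardShrink: pass x through outside [-threshold, threshold], zero it inside.
double hard_shrink(double x, double threshold = 0.5) {
  return (x > threshold || x < -threshold) ? x : 0.0;
}

int main() {
  const double xs[] = {-1.0, -0.3, 0.0, 0.3, 1.0};
  for (double x : xs) std::printf("hard_shrink(%g) = %g\n", x, hard_shrink(x));
}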

class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
public:
void Make() override {
@@ -454,39 +348,6 @@ class ELUGradOpMaker : public framework::SingleGradOpMaker<T> {
}
};

class LogitOpMaker : public framework::OpProtoAndCheckerMaker {
public:
void Make() override {
AddInput("X", "Input of Logit operator");
AddOutput("Out", "Output of Logit operator");
AddAttr<float>("eps",
"(float, default 1e-6f) the epsilon for input clamp bound")
.SetDefault(1e-6f);
AddComment(R"DOC(
Logit Operator.

This function is defined as follows:
$ logit = \ln\left( \frac{x}{1-x} \right) $

)DOC");
}
};
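
A sketch of the forward formula; clamping x into [eps, 1 - eps] before the log is an assumption about how the eps attribute bounds the input:

#include <algorithm>
#include <cmath>
#include <cstdio>

// logit(x) = ln(x / (1 - x)), with x clamped to [eps, 1 - eps] first
// (assumed interpretation of the "eps" clamp bound above).
double logit(double x, double eps = 1e-6) {
  x = std::clamp(x, eps, 1.0 - eps);
  return std::log(x / (1.0 - x));
}

int main() {
  std::printf("logit(0.5) = %g\n", logit(0.5));  // 0
  std::printf("logit(1.0) = %g\n", logit(1.0));  // finite because of the clamp
}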

template <typename T>
class LogitGradOpMaker : public framework::SingleGradOpMaker<T> {
public:
using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

protected:
void Apply(GradOpPtr<T> grad_op) const override {
grad_op->SetType("logit_grad");
grad_op->SetInput("X", this->Input("X"));
grad_op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
grad_op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));
grad_op->SetAttrMap(this->Attrs());
}
};
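
For reference, the analytic derivative behind logit_grad is d/dx ln(x/(1-x)) = 1/(x(1-x)); how clamped inputs are handled is kernel-specific and not shown here. A minimal sketch:

// dX = dOut / (x * (1 - x)), the chain rule applied to the logit forward pass.
double logit_grad(double x, double dout) { return dout / (x * (1.0 - x)); }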

class CELUOpMaker : public framework::OpProtoAndCheckerMaker {
public:
void Make() override {
@@ -591,31 +452,6 @@ class ThresholdedReluOpMaker : public framework::OpProtoAndCheckerMaker {
}
};

class HardSigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
public:
void Make() override {
AddInput("X", "An N-D Tensor with data type float32, float64. ");
AddOutput("Out", "A Tensor with the same shape as input. ");
AddAttr<float>("slope",
"The slope of the linear approximation of sigmoid. Its "
"value MUST BE positive. Default is 0.2. ")
.SetDefault(0.2f);
AddAttr<float>(
"offset",
"The offset of the linear approximation of sigmoid. Default is 0.5. ")
.SetDefault(0.5f);
AddComment(R"DOC(
HardSigmoid Activation Operator.

A 3-part piecewise linear approximation of sigmoid (https://arxiv.org/abs/1603.00391),
which is much faster than sigmoid.

$$out = \max(0, \min(1, slope * x + offset))$$

)DOC");
}
};
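
The formula above as a standalone sketch, with the default slope and offset attributes:

#include <algorithm>
#include <cstdio>

// out = max(0, min(1, slope * x + offset)).
double hard_sigmoid(double x, double slope = 0.2, double offset = 0.5) {
  return std::clamp(slope * x + offset, 0.0, 1.0);
}

int main() {
  const double xs[] = {-3.0, 0.0, 3.0};
  for (double x : xs) std::printf("hard_sigmoid(%g) = %g\n", x, hard_sigmoid(x));
  // -3 -> 0 (clamped), 0 -> 0.5, 3 -> 1 (clamped)
}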

class SwishOpMaker : public framework::OpProtoAndCheckerMaker {
public:
void Make() override {
@@ -684,22 +520,12 @@ It is recommended to use the defaults for this activation.
};

REGISTER_ACTIVATION_OP_MAKER(Sigmoid, SigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(Silu, SiluDoc);
REGISTER_ACTIVATION_OP_MAKER(LogSigmoid, LogSigmoidDoc);
REGISTER_ACTIVATION_OP_MAKER(Expm1, Expm1Doc);
REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc);
REGISTER_ACTIVATION_OP_MAKER(Tanh, TanhDoc);
REGISTER_ACTIVATION_OP_MAKER(TanhShrink, TanhShrinkDoc);
REGISTER_ACTIVATION_OP_MAKER(Sqrt, SqrtDoc);
REGISTER_ACTIVATION_OP_MAKER(Rsqrt, RsqrtDoc);
REGISTER_ACTIVATION_OP_MAKER(Ceil, CeilDoc);
REGISTER_ACTIVATION_OP_MAKER(Floor, FloorDoc);
REGISTER_ACTIVATION_OP_MAKER(Round, RoundDoc);
REGISTER_ACTIVATION_OP_MAKER(Reciprocal, ReciprocalDoc);
REGISTER_ACTIVATION_OP_MAKER(Log, LogDoc);
REGISTER_ACTIVATION_OP_MAKER(Log2, Log2Doc);
REGISTER_ACTIVATION_OP_MAKER(Log10, Log10Doc);
REGISTER_ACTIVATION_OP_MAKER(Log1p, Log1pDoc);
REGISTER_ACTIVATION_OP_MAKER(Square, SquareDoc);
REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);

@@ -1093,73 +919,6 @@ DECLARE_INPLACE_OP_INFERER(ActivationDoubleGradOpInplaceInferer,
DECLARE_INPLACE_OP_INFERER(ActivationTripleGradOpInplaceInferer,
{"DDX", "D_DOut"});

class LogitOp : public framework::OperatorWithKernel {
public:
LogitOp(const std::string& type,
const framework::VariableNameMap& inputs,
const framework::VariableNameMap& outputs,
const framework::AttributeMap& attrs)
: OperatorWithKernel(type, inputs, outputs, attrs) {}

void InferShape(framework::InferShapeContext* ctx) const override {
PADDLE_ENFORCE_EQ(ctx->HasInput("X"),
true,
platform::errors::InvalidArgument(
"Input(%s) of LogitOp should not be null.", "X"));
PADDLE_ENFORCE_EQ(ctx->HasOutput("Out"),
true,
platform::errors::InvalidArgument(
"Output(%s) of LogitOp should not be null.", "Out"));

ctx->ShareDim("X", /*->*/ "Out");
ctx->ShareLoD("X", /*->*/ "Out");
}

protected:
framework::OpKernelType GetExpectedKernelType(
const framework::ExecutionContext& ctx) const override {
framework::LibraryType library{framework::LibraryType::kPlain};
phi::DataLayout layout = phi::DataLayout::kAnyLayout;
auto data_type = OperatorWithKernel::IndicateVarDataType(ctx, "X");

return framework::OpKernelType(data_type, ctx.GetPlace(), layout, library);
}
};

class LogitGradOp : public framework::OperatorWithKernel {
public:
using framework::OperatorWithKernel::OperatorWithKernel;

void InferShape(framework::InferShapeContext* ctx) const override {
PADDLE_ENFORCE_EQ(
ctx->HasInput(framework::GradVarName("Out")),
true,
platform::errors::InvalidArgument(
"Input(%s) of LogitGradOp should not be null.", "DOut"));
PADDLE_ENFORCE_EQ(ctx->HasInput("X"),
true,
platform::errors::InvalidArgument(
"Input(%s) of LogitGradOp should not be null.", "X"));
PADDLE_ENFORCE_EQ(
ctx->HasOutput(framework::GradVarName("X")),
true,
platform::errors::InvalidArgument(
"Output(%s) of LogitGradOp should not be null.", "DX"));
auto x_grad_name = framework::GradVarName("X");
ctx->SetOutputDim(x_grad_name, ctx->GetInputDim("X"));
ctx->ShareLoD("X", /*->*/ x_grad_name);
}

protected:
framework::OpKernelType GetExpectedKernelType(
const framework::ExecutionContext& ctx) const override {
framework::LibraryType library{framework::LibraryType::kPlain};
phi::DataLayout layout = phi::DataLayout::kAnyLayout;
auto data_type = OperatorWithKernel::IndicateVarDataType(ctx, "X");
return framework::OpKernelType(data_type, ctx.GetPlace(), layout, library);
}
};

template <typename T>
class PowGradOpMaker : public framework::SingleGradOpMaker<T> {
public:
@@ -1273,10 +1032,6 @@ REGISTER_ACTIVATION_OP(thresholded_relu,
ThresholdedReluFunctor,
ThresholdedReluGradFunctor);
REGISTER_ACTIVATION_OP(relu6, Relu6, Relu6Functor, Relu6GradFunctor);
REGISTER_ACTIVATION_OP(hard_shrink,
HardShrink,
HardShrinkFunctor,
HardShrinkGradFunctor);
REGISTER_ACTIVATION_OP(softshrink,
SoftShrink,
SoftShrinkFunctor,
@@ -1285,42 +1040,21 @@ REGISTER_ACTIVATION_OP(tanh_shrink,
TanhShrink,
TanhShrinkFunctor,
TanhShrinkGradFunctor);
REGISTER_ACTIVATION_OP(silu, Silu, SiluFunctor, SiluGradFunctor);
REGISTER_ACTIVATION_OP(softsign,
Softsign,
SoftsignFunctor,
SoftsignGradFunctor);
REGISTER_ACTIVATION_OP(hard_sigmoid,
HardSigmoid,
HardSigmoidFunctor,
HardSigmoidGradFunctor);
REGISTER_ACTIVATION_OP(logsigmoid,
LogSigmoid,
LogSigmoidFunctor,
LogSigmoidGradFunctor);
REGISTER_ACTIVATION_OP(expm1, Expm1, Expm1Functor, Expm1GradFunctor);
REGISTER_ACTIVATION_OP(softplus,
Softplus,
SoftplusFunctor,
SoftplusGradFunctor);
REGISTER_ACTIVATION_OP(mish, Mish, MishFunctor, MishGradFunctor);
REGISTER_ACTIVATION_OP(stanh, STanh, STanhFunctor, STanhGradFunctor);
REGISTER_ACTIVATION_OP(reciprocal,
Reciprocal,
ReciprocalFunctor,
ReciprocalGradFunctor);

REGISTER_ACTIVATION_OP(log2, Log2, Log2Functor, Log2GradFunctor);
REGISTER_ACTIVATION_OP(log10, Log10, Log10Functor, Log10GradFunctor);
REGISTER_ACTIVATION_OP(log1p, Log1p, Log1pFunctor, Log1pGradFunctor);
REGISTER_ACTIVATION_OP(hard_swish,
HardSwish,
HardSwishFunctor,
HardSwishGradFunctor);
REGISTER_ACTIVATION_OP(swish, Swish, SwishFunctor, SwishGradFunctor);
REGISTER_ACTIVATION_OP(round, Round, RoundFunctor, ZeroGradFunctor);
REGISTER_ACTIVATION_OP(floor, Floor, FloorFunctor, ZeroGradFunctor);
REGISTER_ACTIVATION_OP(ceil, Ceil, CeilFunctor, ZeroGradFunctor);
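
round, floor, and ceil are registered with ZeroGradFunctor because each is piecewise constant: the derivative is zero almost everywhere, so the backward pass can ignore dOut entirely. A sketch of that idea:

#include <vector>

// A zero-grad backward: dX is all zeros regardless of the incoming dOut.
std::vector<double> zero_grad(const std::vector<double>& dout) {
  return std::vector<double>(dout.size(), 0.0);
}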

/* ========================== sigmoid register =============================
*/
@@ -1459,17 +1193,6 @@ REGISTER_OPERATOR(

/* ========================================================================== */

/* ======================== logit register ============================
*/
REGISTER_OPERATOR(logit,
ops::LogitOp,
ops::LogitOpMaker,
ops::LogitGradOpMaker<paddle::framework::OpDesc>,
ops::LogitGradOpMaker<paddle::imperative::OpBase>);
REGISTER_OPERATOR(logit_grad, ops::LogitGradOp);

/* ========================================================================== */

/* ======================== celu register ============================
*/
REGISTER_OPERATOR(